├── .coveralls.yml
├── .github
│   └── workflows
│       └── codeql-analysis.yml
├── .gitignore
├── .npmignore
├── .nvmrc
├── .travis.yml
├── README.md
├── assets
│   └── img
│       └── mediamachine-logo.png
├── jest.config.js
├── package-lock.json
├── package.json
├── src
│   ├── Executable.ts
│   ├── Newable.ts
│   ├── WorkerConfig.ts
│   ├── WorkerTarget.ts
│   ├── api.ts
│   ├── blob.ts
│   ├── index.test.ts
│   ├── index.ts
│   ├── job.ts
│   ├── summary.ts
│   ├── thumbnail.ts
│   ├── transcode.ts
│   ├── utils.ts
│   ├── watermark.ts
│   └── webhooks.ts
├── tsconfig.json
└── yarn.lock
/.coveralls.yml:
--------------------------------------------------------------------------------
1 | service_name: travis-pro
2 | repo_token: kUNX3DYuLeTYjPlnzdqfpeOTPZqme062l
3 |
4 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | # ******** NOTE ********
8 | # We have attempted to detect the languages in your repository. Please check
9 | # the `language` matrix defined below to confirm you have the correct set of
10 | # supported CodeQL languages.
11 | #
12 | name: "CodeQL"
13 |
14 | on:
15 | push:
16 | branches: [ master ]
17 | pull_request:
18 | # The branches below must be a subset of the branches above
19 | branches: [ master ]
20 | schedule:
21 | - cron: '26 3 * * 1'
22 |
23 | jobs:
24 | analyze:
25 | name: Analyze
26 | runs-on: ubuntu-latest
27 |
28 | strategy:
29 | fail-fast: false
30 | matrix:
31 | language: [ 'javascript' ]
32 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
33 | # Learn more:
34 | # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
35 |
36 | steps:
37 | - name: Checkout repository
38 | uses: actions/checkout@v2
39 |
40 | # Initializes the CodeQL tools for scanning.
41 | - name: Initialize CodeQL
42 | uses: github/codeql-action/init@v1
43 | with:
44 | languages: ${{ matrix.language }}
45 | # If you wish to specify custom queries, you can do so here or in a config file.
46 | # By default, queries listed here will override any specified in a config file.
47 | # Prefix the list here with "+" to use these queries and those in the config file.
48 | # queries: ./path/to/local/query, your-org/your-repo/queries@main
49 |
50 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
51 | # If this step fails, then you should remove it and run the build manually (see below)
52 | - name: Autobuild
53 | uses: github/codeql-action/autobuild@v1
54 |
55 | # ℹ️ Command-line programs to run using the OS shell.
56 | # 📚 https://git.io/JvXDl
57 |
58 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
59 | # and modify them (or add more) to build your code if your project
60 | # uses a compiled language
61 |
62 | #- run: |
63 | # make bootstrap
64 | # make release
65 |
66 | - name: Perform CodeQL Analysis
67 | uses: github/codeql-action/analyze@v1
68 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/
2 | coverage/
3 | lib/
4 | .idea/
5 | .env
6 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | src/
2 | .idea/
3 | jest.config.js
4 | tsconfig.json
5 | .nvmrc
6 | .env
7 |
--------------------------------------------------------------------------------
/.nvmrc:
--------------------------------------------------------------------------------
1 | 14.15
2 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: node_js
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | This library will let you use [MediaMachine](https://mediamachine.io)'s api to:
4 |
5 | - Transcode a video to a different format
6 | - Generate a thumbnail image from a video
7 | - Generate a summary from a video in gif or mp4 format
8 |
9 | [npm version](https://badge.fury.io/js/mediamachine) [install size](https://packagephobia.com/result?p=mediamachine) [build status](https://travis-ci.com/stackrock/mediamachinejs) [coverage status](https://coveralls.io/github/stackrock/mediamachinejs?branch=master)
10 |
11 | ## Installation
12 |
13 | ```
14 | $ npm install mediamachine
15 | ```
16 |
17 | ## Usage
18 |
19 | First import and create a mediamachine client:
20 |
21 | ```javascript
22 | import { MediaMachine } from "mediamachine";
23 | const MEDIAMACHINE_API_KEY = "your mediamachine api key here";
24 | const mediaMachine = new MediaMachine(MEDIAMACHINE_API_KEY);
25 | ```
26 |
27 | Each type of request (`thumbnail()`, `transcodeToMp4()`, `transcodeToWebm()` and `summary()`) creates and returns a [Job](#job) object that you can use to query the state of that Job.
28 |
29 | Input for any of the services can come from any of the following:
30 |
31 | - URL using `fromUrl()`
32 | - Amazon S3 using `fromS3()`
33 | - Google GCP using `fromGCloud()`
34 | - Microsoft Azure buckets using `fromAzure()`
35 |
36 | Also, each service type can store the output in any of the following:
37 |
38 | - Amazon S3 using `toS3()`
39 | - Google GCP using `toGCloud()`
40 | - Microsoft Azure buckets using `toAzure()`
41 | - URL (We `POST` to that URL when the output is ready) using `toUrl()`
42 |
43 | Additionally, a request to any service can accept success and failure endpoints that will be called with the output of the process once it’s done.
44 |
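For illustration, here is a minimal sketch of such an endpoint. It assumes an Express server (Express is not part of this library), and the route paths simply match the `successUrl`/`failureUrl` values used in the examples below; the exact callback payload shape is not documented here.

```javascript
// Hypothetical Express endpoints that receive MediaMachine success/failure callbacks.
const express = require("express");
const app = express();
app.use(express.json());

app.post("/api/mediamachineSuccess", (req, res) => {
  // req.body holds the callback payload sent by MediaMachine (shape not shown here).
  console.log("job succeeded:", req.body);
  res.sendStatus(200);
});

app.post("/api/mediamachineFailure", (req, res) => {
  console.log("job failed:", req.body);
  res.sendStatus(200);
});

app.listen(3000);
```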
45 | ### thumbnail()
46 |
47 | The `thumbnail()` method uses a smart algorithm to automatically choose the best frame of a video, and additionally allows you to scale and watermark it.
48 |
49 | This method takes a single argument of the following optional inputs:
50 |
51 | * `width` : number representing the desired width of the thumbnail (default: 720 px).
52 | * `watermark` : a [Watermark](#Watermarking) object to use for the image's watermark.
53 | * `successUrl` : a url for MediaMachine to POST to when the thumbnail has been created.
54 | * `failureUrl` : a url for MediaMachine to POST to when the thumbnail could not be created.
55 |
56 | The simplest version might be:
57 |
58 | ```javascript
59 | const job = await mediaMachine.thumbnail()
60 | .fromUrl("https://myserver.example/someVideo.mp4")
61 | .toUrl("https://myserver.example/api/uploadFile");
62 | ```
63 |
64 | Here's an example usage that takes a video from Amazon S3 and puts a thumbnail back to Amazon S3.
65 |
66 | ```javascript
67 | const job = await mediaMachine.thumbnail({
68 | width: 150,
69 | watermark: mediaMachine.textWatermark("media machine!"),
70 | successUrl: "https://myserver.example/api/mediamachineSuccess",
71 | failureUrl: "https://myserver.example/api/mediamachineFailure",
72 | })
73 | .fromS3(AWS_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, BUCKET, INPUT_KEY)
74 | .toS3(AWS_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, BUCKET, OUTPUT_KEY);
75 | ```
76 |
77 | Here's an example usage that takes a video from Azure and puts a thumbnail back to Azure with a full [watermark](#watermarking) configuration:
78 |
79 | ```javascript
80 | const watermark = mediaMachine.textWatermark("media machine!!!", {
81 | fontSize: 14,
82 | fontColor: "#ffffff",
83 | opacity: 0.9,
84 | position: "bottomRight",
85 | });
86 |
87 | const job = await mediaMachine.thumbnail({
88 | watermark: watermark,
89 | })
90 | .fromAzure(ACCOUNT_KEY, ACCOUNT_NAME, BUCKET, INPUT_KEY)
91 | .toAzure(ACCOUNT_KEY, ACCOUNT_NAME, BUCKET, OUTPUT_KEY);
92 | ```
93 |
94 | ### transcodeToMp4()
95 |
96 | The `transcodeToMp4()` method transcodes SD/HD/FHD videos from virtually any format to MP4.
97 |
98 | This method takes a single argument of the following optional inputs:
99 |
100 | * `height` : number representing the desired height of the video output.
101 | * `width` : number representing the desired width of the video output.
102 | * `watermark` : a [Watermark](#Watermarking) object to use for the video's watermark.
103 | * `encoder` : "h264", "h265", "vp8", "vp9" (default: "h264")
104 | * `successUrl` : a url for MediaMachine to POST to when the video has been transcoded.
105 | * `failureUrl` : a url for MediaMachine to POST to when the video could not be transcoded.
106 |
107 | The simplest version might be:
108 |
109 | ```javascript
110 | const job = await mediaMachine.transcodeToMp4()
111 | .fromUrl("https://myserver.example/someVideo.avi")
112 | .toUrl("https://myserver.example/api/uploadFile");
113 | ```
114 |
115 | Here's an example usage that takes a video from Azure and puts an h265 mp4 version of it back to Azure.
116 |
117 | ```javascript
118 | const job = await mediaMachine.transcodeToMp4({
119 | width: 150,
120 | height: 150,
121 | encoder: "h265",
122 | successUrl: "https://myserver.example/api/mediamachineSuccess",
123 | failureUrl: "https://myserver.example/api/mediamachineFailure",
124 | })
125 | .fromAzure(ACCOUNT_KEY, ACCOUNT_NAME, BUCKET, INPUT_KEY)
126 | .toAzure(ACCOUNT_KEY, ACCOUNT_NAME, BUCKET, OUTPUT_KEY);
127 | ```
128 |
129 | Here's an example usage that takes a video from Google Cloud and puts an mp4 video back to Google Cloud with a full watermark configuration:
130 |
131 | ```javascript
132 | const watermark = mediaMachine.textWatermark("media machine!!!", {
133 | fontSize: 14,
134 | fontColor: "#ffffff",
135 | opacity: 0.9,
136 | position: "bottomRight",
137 | });
138 |
139 | const job = await mediaMachine.transcodeToMp4({
140 | watermark: watermark,
141 | })
142 | .fromGCloud(GCLOUD_CREDS, BUCKET, INPUT_KEY)
143 | .toGCloud(GCLOUD_CREDS, BUCKET, OUTPUT_KEY);
144 | ```
145 |
146 |
147 | ### transcodeToWebm()
148 |
149 | The `transcodeToWebm()` method transcodes SD/HD/FHD videos from virtually any format to WebM.
150 |
151 | This method takes a single argument of the following optional inputs:
152 |
153 | * `height` : number representing the desired height of the video output.
154 | * `width` : number representing the desired width of the video output.
155 | * `watermark` : a [Watermark](#Watermarking) object to use for the video's watermark.
156 | * `encoder` : "vp8", "vp9" (default: "vp8")
157 | * `successUrl` : a url for MediaMachine to POST to when the video has been transcoded.
158 | * `failureUrl` : a url for MediaMachine to POST to when the video could not be transcoded.
159 |
160 | The simplest version might be:
161 |
162 | ```javascript
163 | const job = await mediaMachine.transcodeToWebm()
164 | .fromUrl("https://myserver.example/someVideo.avi")
165 | .toUrl("https://myserver.example/api/uploadFile");
166 | ```
167 |
168 | Here's an example usage that takes a video from Azure and puts a vp9 webm version of it back to Azure.
169 |
170 | ```javascript
171 | const job = await mediaMachine.transcodeToWebm({
172 | width: 150,
173 | height: 150,
174 | encoder: "vp9",
175 | successUrl: "https://myserver.example/api/mediamachineSuccess",
176 | failureUrl: "https://myserver.example/api/mediamachineFailure",
177 | })
178 | .fromAzure(ACCOUNT_KEY, ACCOUNT_NAME, BUCKET, INPUT_KEY)
179 | .toAzure(ACCOUNT_KEY, ACCOUNT_NAME, BUCKET, OUTPUT_KEY);
180 | ```
181 |
182 | Here's an example usage that takes a video from Google Cloud and puts a webm version back to Google Cloud with a full watermark configuration:
183 |
184 | ```javascript
185 | const watermark = mediaMachine.textWatermark("media machine!!!", {
186 | fontSize: 14,
187 | fontColor: "#ffffff",
188 | opacity: 0.9,
189 | position: "bottomRight",
190 | });
191 |
192 | const job = await mediaMachine.transcodeToWebm({
193 | watermark: watermark,
194 | })
195 | .fromGCloud(GCLOUD_CREDS, BUCKET, INPUT_KEY)
196 | .toGCloud(GCLOUD_CREDS, BUCKET, OUTPUT_KEY);
197 | ```
198 |
199 | ### summary()
200 |
201 | The `summary()` method creates a shorter summary/preview of the input video in GIF or MP4 format.
202 |
203 | Note: For MP4 video summary, the input video should be more than 15 seconds long.
204 |
205 | This method takes a single argument of the following optional inputs:
206 |
207 | * `width` : number representing the desired width of the video output.
208 | * `watermark` : a [Watermark](#Watermarking) object to use for the summary's watermark.
209 | * `format` : "mp4", "gif" -- the output format you want (default: "gif")
210 | * `removeAudio` : a boolean to indicate whether to remove audio (default: false, applies only to mp4s)
211 | * `successUrl` : a url for MediaMachine to POST to when the summary has been created.
212 | * `failureUrl` : a url for MediaMachine to POST to when the summary could not be created.
213 |
214 | The simplest version might be:
215 |
216 | ```javascript
217 | const job = await mediaMachine.summary()
218 | .fromUrl("https://myserver.example/someVideo.mp4")
219 | .toUrl("https://myserver.example/api/uploadFile");
220 | ```
221 |
222 | Here's an example usage that takes a video from Google Cloud and puts a silent summarized mp4 version of it back to Google Cloud.
223 |
224 | ```javascript
225 | const job = await mediaMachine.summary({
226 | width: 150,
227 | watermark: mediaMachine.textWatermark("media machine!"),
228 | format: "mp4",
229 | removeAudio: true,
230 | successUrl: "https://myserver.example/api/mediamachineSuccess",
231 | failureUrl: "https://myserver.example/api/mediamachineFailure",
232 | })
233 | .fromGCloud(GCLOUD_CREDS, BUCKET, INPUT_KEY)
234 | .toGCloud(GCLOUD_CREDS, BUCKET, OUTPUT_KEY);
235 | ```
236 |
237 | Here's an example usage that takes a video from Amazon S3 and puts a summarized gif back to Amazon S3 with a full watermark configuration:
238 |
239 | ```javascript
240 | const watermark = mediaMachine.textWatermark("media machine!!!", {
241 | fontSize: 14,
242 | fontColor: "#ffffff",
243 | opacity: 0.9,
244 | position: "bottomRight",
245 | });
246 |
247 | const job = await mediaMachine.summary({
248 | watermark: watermark,
249 | })
250 | .fromS3(AWS_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, BUCKET, INPUT_KEY)
251 | .toS3(AWS_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, BUCKET, OUTPUT_KEY);
252 | ```
253 |
254 | ### Job
255 |
256 | A Job object is what's ultimately returned from your request. You can query the job's status at any time with the `status()` method.
257 |
258 | The possible states for the job are:
259 |
260 | - `notStarted` (The job has not been started at all).
261 | - `queued` (The job is waiting to be executed).
262 | - `done` (The job has finished successfully).
263 | - `errored` (The job failed).
264 |
265 | To get the status you can do:
266 |
267 | ```javascript
268 | await job.status();
269 | ```
270 |
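For example, here is a minimal polling sketch that waits for a terminal state; the interval and the `waitForJob` helper are illustrative and not part of the library.

```javascript
// Poll the job until it reaches a terminal state ("done" or "errored").
async function waitForJob(job, intervalMs = 5000) {
  for (;;) {
    const status = await job.status();
    if (status === "done" || status === "errored") {
      return status;
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
}

// const finalStatus = await waitForJob(job);
```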
271 |
272 | ## Watermarking
273 |
274 | A watermark is an image or text overlay that is laid over another image or video, usually to add branding to it.
275 |
276 | You can configure watermarking for any/all of your summary(), thumbnail(), transcodeToMp4(), and transcodeToWebm() calls by first creating a watermark, and then supplying it in the optional arguments to `summary()`, `thumbnail()`, `transcodeToWebm()` or `transcodeToMp4()` as the `watermark` parameter.
277 |
278 | There are two types of watermarks:
279 | * text watermarks, where you supply and configure some text to be the watermark (see [textWatermark()](#textwatermarktext-options)).
280 | * image watermarks, where you supply and configure an image to be the watermark (see [imageWatermark()](#imagewatermark)).
281 |
282 | ### textWatermark(text, [options])
283 |
284 | The `textWatermark(text, [options])` method takes a string of text to use as well as an additional argument of the following optional inputs:
285 |
286 | * `fontSize` : the size for the text (a number, default: 12)
287 | * `fontColor` : the color for the text ( default: "#000000")
288 | * `opacity` : number between 0 and 1 representing the desired opacity of the output. 0 is fully transparent and 1 is fully opaque (default: 1)
289 | * `position` : "topLeft", "topRight", "bottomLeft", "bottomRight" (default: "bottomRight")
290 |
291 | The simplest example is:
292 |
293 | ```javascript
294 | const watermark = mediaMachine.textWatermark("media machine!!!");
295 | ```
296 |
297 | Here's a more complex example using all the options:
298 |
299 | ```javascript
300 | const watermark = mediaMachine.textWatermark("media machine!!!", {
301 | fontSize: 14,
302 | fontColor: "#ffffff",
303 | opacity: 0.9,
304 | position: "bottomRight",
305 | });
306 | ```
307 |
308 | ### imageWatermark()
309 |
310 | The `imageWatermark()` method takes a single argument of the following optional inputs:
311 |
312 | * `url` : the url of the image to be used
313 | * `uploaded_image_name` : the name of the uploaded image to be used
314 | * `width` : number representing the desired width of the video output
315 | * `height` : number representing the desired height of the video output
316 | * `opacity` : number between 0 and 1 representing the desired opacity of the output. 0 is fully transparent and 1 is fully opaque (default: 1)
317 | * `position` : "topLeft", "topRight", "bottomLeft", "bottomRight" (default: "bottomRight")
318 |
319 | *NB:* You must supply `uploaded_image_name` or `url`, but not both.
320 |
321 | Here's a simple example using a url:
322 |
323 | ```javascript
324 | const watermark = mediaMachine.imageWatermark({
325 | url: "https://myserver.example/asdf.jpg",
326 | });
327 | ```
328 |
329 | Here's another simple example using a named watermark, after you upload one to our servers:
330 |
331 | ```javascript
332 | const watermark = mediaMachine.imageWatermark({
333 | uploaded_image_name: "company_watermark",
334 | });
335 | ```
336 |
337 | Here's an example with all the options:
338 |
339 | ```javascript
340 | const watermark = mediaMachine.imageWatermark({
341 | uploaded_image_name: "company_watermark",
342 | position: "bottomLeft",
343 | height: 40,
344 | width: 90,
345 | opacity: 0.9,
346 | });
347 | ```
348 |
--------------------------------------------------------------------------------
/assets/img/mediamachine-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/stackrock/mediamachinejs/b7decc38a493875652d6e52cbe443fd8f73a7933/assets/img/mediamachine-logo.png
--------------------------------------------------------------------------------
/jest.config.js:
--------------------------------------------------------------------------------
1 | // For a detailed explanation regarding each configuration property, visit:
2 | // https://jestjs.io/docs/en/configuration.html
3 |
4 | module.exports = {
5 | // All imported modules in your tests should be mocked automatically
6 | // automock: false,
7 |
8 | // Stop running tests after `n` failures
9 | // bail: 0,
10 |
11 | // The directory where Jest should store its cached dependency information
12 | // cacheDirectory: "/tmp/jest_rs",
13 |
14 | // Automatically clear mock calls and instances between every test
15 | clearMocks: true,
16 |
17 | // Indicates whether the coverage information should be collected while executing the test
18 | // collectCoverage: false,
19 |
20 | // An array of glob patterns indicating a set of files for which coverage information should be collected
21 | // collectCoverageFrom: undefined,
22 |
23 | // The directory where Jest should output its coverage files
24 | coverageDirectory: "coverage",
25 |
26 | // An array of regexp pattern strings used to skip coverage collection
27 | // coveragePathIgnorePatterns: [
28 | // "/node_modules/"
29 | // ],
30 |
31 | // Indicates which provider should be used to instrument code for coverage
32 | // coverageProvider: "babel",
33 |
34 | // A list of reporter names that Jest uses when writing coverage reports
35 | // coverageReporters: [
36 | // "json",
37 | // "text",
38 | // "lcov",
39 | // "clover"
40 | // ],
41 |
42 | // An object that configures minimum threshold enforcement for coverage results
43 | // coverageThreshold: undefined,
44 |
45 | // A path to a custom dependency extractor
46 | // dependencyExtractor: undefined,
47 |
48 | // Make calling deprecated APIs throw helpful error messages
49 | // errorOnDeprecated: false,
50 |
51 | // Force coverage collection from ignored files using an array of glob patterns
52 | // forceCoverageMatch: [],
53 |
54 | // A path to a module which exports an async function that is triggered once before all test suites
55 | // globalSetup: undefined,
56 |
57 | // A path to a module which exports an async function that is triggered once after all test suites
58 | // globalTeardown: undefined,
59 |
60 | // A set of global variables that need to be available in all test environments
61 | // globals: {},
62 |
63 | // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
64 | // maxWorkers: "50%",
65 |
66 | // An array of directory names to be searched recursively up from the requiring module's location
67 | // moduleDirectories: [
68 | // "node_modules"
69 | // ],
70 |
71 | // An array of file extensions your modules use
72 | // moduleFileExtensions: [
73 | // "js",
74 | // "json",
75 | // "jsx",
76 | // "ts",
77 | // "tsx",
78 | // "node"
79 | // ],
80 |
81 | // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
82 | // moduleNameMapper: {},
83 |
84 | // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
85 | // modulePathIgnorePatterns: [],
86 |
87 | // Activates notifications for test results
88 | // notify: false,
89 |
90 | // An enum that specifies notification mode. Requires { notify: true }
91 | // notifyMode: "failure-change",
92 |
93 | // A preset that is used as a base for Jest's configuration
94 | // preset: undefined,
95 |
96 | // Run tests from one or more projects
97 | // projects: undefined,
98 |
99 | // Use this configuration option to add custom reporters to Jest
100 | // reporters: undefined,
101 |
102 | // Automatically reset mock state between every test
103 | // resetMocks: false,
104 |
105 | // Reset the module registry before running each individual test
106 | // resetModules: false,
107 |
108 | // A path to a custom resolver
109 | // resolver: undefined,
110 |
111 | // Automatically restore mock state between every test
112 | // restoreMocks: false,
113 |
114 | // The root directory that Jest should scan for tests and modules within
115 | // rootDir: undefined,
116 |
117 | // A list of paths to directories that Jest should use to search for files in
118 | // roots: [
119 | //   "<rootDir>"
120 | // ],
121 |
122 | // Allows you to use a custom runner instead of Jest's default test runner
123 | // runner: "jest-runner",
124 |
125 | // The paths to modules that run some code to configure or set up the testing environment before each test
126 | // setupFiles: [],
127 |
128 | // A list of paths to modules that run some code to configure or set up the testing framework before each test
129 | // setupFilesAfterEnv: [],
130 |
131 | // A list of paths to snapshot serializer modules Jest should use for snapshot testing
132 | // snapshotSerializers: [],
133 |
134 | // The test environment that will be used for testing
135 | testEnvironment: "node",
136 |
137 | // Options that will be passed to the testEnvironment
138 | // testEnvironmentOptions: {},
139 |
140 | // Adds a location field to test results
141 | // testLocationInResults: false,
142 |
143 | // The glob patterns Jest uses to detect test files
144 | // testMatch: [
145 | // "**/__tests__/**/*.[jt]s?(x)",
146 | // "**/?(*.)+(spec|test).[tj]s?(x)"
147 | // ],
148 |
149 | // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
150 | // testPathIgnorePatterns: [
151 | // "/node_modules/"
152 | // ],
153 |
154 | // The regexp pattern or array of patterns that Jest uses to detect test files
155 | // testRegex: [],
156 |
157 | // This option allows the use of a custom results processor
158 | // testResultsProcessor: undefined,
159 |
160 | // This option allows use of a custom test runner
161 | // testRunner: "jasmine2",
162 |
163 | // This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
164 | // testURL: "http://localhost",
165 |
166 | // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
167 | // timers: "real",
168 |
169 | // A map from regular expressions to paths to transformers
170 | // transform: undefined,
171 |
172 | // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
173 | // transformIgnorePatterns: [
174 | // "/node_modules/"
175 | // ],
176 |
177 | // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
178 | // unmockedModulePathPatterns: undefined,
179 |
180 | // Indicates whether each individual test should be reported during the run
181 | // verbose: undefined,
182 |
183 | // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
184 | // watchPathIgnorePatterns: [],
185 |
186 | // Whether to use watchman for file crawling
187 | // watchman: true,
188 | roots: ['<rootDir>/src'],
189 | transform: {
190 | '^.+\\.tsx?$': 'ts-jest',
191 | },
192 | // testRegex: '(/__tests__/.*|(\\.|/)(test|spec))\\.tsx?$',
193 | moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'],
194 | };
195 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "mediamachine",
3 | "version": "1.0.6",
4 | "description": "MediaMachine services client api",
5 | "main": "lib/index.js",
6 | "repository": {
7 | "type": "git",
8 | "url": "https://github.com/stackrock/mediamachinejs.git"
9 | },
10 | "scripts": {
11 | "test": "jest --coverage && coveralls < coverage/lcov.info",
12 | "build": "tsc",
13 | "prepublish": "npm run build"
14 | },
15 | "author": "Stackrock ",
16 | "license": "ISC",
17 | "dependencies": {
18 | "axios": "0.21.1"
19 | },
20 | "keywords": [
21 | "stackrock",
22 | "mediamachine",
23 | "video",
24 | "transcoding",
25 | "thumbnailing",
26 | "transcode",
27 | "thumbnail",
28 | "h264",
29 | "vp8",
30 | "vp9",
31 | "mp4",
32 | "webm",
33 | "h265"
34 | ],
35 | "homepage": "https://mediamachine.io",
36 | "devDependencies": {
37 | "@types/axios": "^0.14.0",
38 | "@types/jest": "^26.0.3",
39 | "axios-mock-adapter": "^1.19",
40 | "coveralls": "^3.1.0",
41 | "dotenv": "^8.2.0",
42 | "jest": "^26.1.0",
43 | "ts-jest": "^26.1.1",
44 | "ts-node": "^9.0.0",
45 | "typescript": "^3.9.5"
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/src/Executable.ts:
--------------------------------------------------------------------------------
1 | import { Job } from "./job";
2 | import { Blob } from "./blob";
3 |
4 | export interface Executable {
5 | apiKey: string;
6 | execute: () => Promise<Job>;
7 | to: (destination: string | Blob) => Executable;
8 | }
--------------------------------------------------------------------------------
/src/Newable.ts:
--------------------------------------------------------------------------------
1 |
2 | /* new T() */
3 | export type Newable<T> = { new (...args: any[]): T; };
4 |
--------------------------------------------------------------------------------
/src/WorkerConfig.ts:
--------------------------------------------------------------------------------
1 | import { Blob, Store } from "./blob";
2 | import { Executable } from "./Executable";
3 | import { Newable } from "./Newable";
4 |
5 | export class WorkerConfig {
6 | apiKey: string;
7 | targetKlass: any;
8 |
9 | constructor(apiKey: string, targetKlass: Newable<any>) {
10 | this.apiKey = apiKey;
11 | this.targetKlass = targetKlass;
12 | }
13 |
14 | getExecutable(from: string | Blob): Executable {
15 | return;
16 | }
17 |
18 | fromS3(
19 | region: string,
20 | accessKeyId: string,
21 | secretAccessKey: string,
22 | bucket: string,
23 | inputKey: string
24 | ) {
25 | const inputFile = new Blob(
26 | {
27 | region,
28 | accessKeyId,
29 | secretAccessKey,
30 | type: Store.S3,
31 | },
32 | bucket,
33 | inputKey
34 | );
35 | const Target = this.targetKlass;
36 | const executable = this.getExecutable(inputFile);
37 | return new Target(executable, inputFile);
38 | }
39 |
40 | fromAzure(
41 | accountKey: string,
42 | accountName: string,
43 | bucket: string,
44 | inputKey: string
45 | ) {
46 | const inputFile = new Blob(
47 | {
48 | accountKey,
49 | accountName,
50 | type: Store.AZURE_BLOB,
51 | },
52 | bucket,
53 | inputKey
54 | );
55 | const Target = this.targetKlass;
56 | const executable = this.getExecutable(inputFile);
57 | return new Target(executable, inputFile);
58 | }
59 |
60 | fromGCloud(json: string, bucket: string, inputKey: string) {
61 | const inputFile = new Blob(
62 | {
63 | json,
64 | type: Store.GOOGLE_BLOB,
65 | },
66 | bucket,
67 | inputKey
68 | );
69 | const Target = this.targetKlass;
70 | const executable = this.getExecutable(inputFile);
71 | return new Target(executable, inputFile);
72 | }
73 |
74 | fromUrl(url: string) {
75 | const Target = this.targetKlass;
76 | const executable = this.getExecutable(url);
77 | return new Target(executable, url);
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/src/WorkerTarget.ts:
--------------------------------------------------------------------------------
1 | import { Executable } from "./Executable";
2 | import { Blob, Store } from "./blob";
3 |
4 | export class WorkerTarget<T extends Executable> {
5 | workerConfig: T;
6 |
7 | constructor(transcoder: T) {
8 | this.workerConfig = transcoder;
9 | }
10 |
11 | async toAzure(
12 | accountKey: string,
13 | accountName: string,
14 | bucket: string,
15 | inputKey: string
16 | ) {
17 | // create the output blob
18 | const outputFile = new Blob(
19 | {
20 | accountKey,
21 | accountName,
22 | type: Store.AZURE_BLOB,
23 | },
24 | bucket,
25 | inputKey
26 | );
27 |
28 | const job = await this.workerConfig.to(outputFile).execute();
29 | return job;
30 | }
31 |
32 | async toGCloud(json: string, bucket: string, inputKey: string) {
33 | // create the output blob
34 | const outputFile = new Blob(
35 | {
36 | json,
37 | type: Store.GOOGLE_BLOB,
38 | },
39 | bucket,
40 | inputKey
41 | );
42 |
43 | const job = await this.workerConfig.to(outputFile).execute();
44 | return job;
45 | }
46 |
47 | async toS3(
48 | region: string,
49 | accessKeyId: string,
50 | secretAccessKey: string,
51 | bucket: string,
52 | inputKey: string
53 | ) {
54 | // create the output blob
55 | const outputFile = new Blob(
56 | {
57 | region,
58 | accessKeyId,
59 | secretAccessKey,
60 | type: Store.S3,
61 | },
62 | bucket,
63 | inputKey
64 | );
65 |
66 | const job = await this.workerConfig.to(outputFile).execute();
67 | return job;
68 | }
69 |
70 | async toUrl(url: string) {
71 | const job = await this.workerConfig.to(url).execute();
72 | return job;
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/src/api.ts:
--------------------------------------------------------------------------------
1 | import axios from "axios";
2 |
3 | const BASE_API_PATH = "https://api.mediamachine.io";
4 |
5 | const SERVICES_TO_PATH = {
6 | thumbnail: "/thumbnail",
7 | gif_summary: "/summary/gif",
8 | mp4_summary: "/summary/mp4",
9 | transcode: "/transcode",
10 | };
11 |
12 | export type JobStatus = "notStarted" | "queued" | "errored" | "done";
13 |
14 | function includes(arr: unknown[], elem: unknown) {
15 | for (const item of arr) {
16 | if (item === elem) {
17 | return true;
18 | }
19 | }
20 |
21 | return false;
22 | }
23 |
24 | export class API {
25 | static async createJob(jobType: string, body: unknown) {
26 | if (!includes(Object.keys(SERVICES_TO_PATH), jobType)) {
27 | return; //throw an error
28 | }
29 |
30 | const uri = `${BASE_API_PATH}${SERVICES_TO_PATH[jobType]}`;
31 | const res = await axios.post(uri, body);
32 |
33 | if (res.status !== 201 && res.status !== 200) {
34 | throw new Error(`Got ${res.status} for body: ${JSON.stringify(body)}`);
35 | }
36 |
37 | return res;
38 | }
39 |
40 | static async jobStatus(reqId: string): Promise<JobStatus> {
41 | const uri = `${BASE_API_PATH}/job/status?reqId=${reqId}`;
42 | const res = await axios.get(uri);
43 |
44 | if (res.status === 404) {
45 | return "notStarted";
46 | }
47 |
48 | if (res.status === 200) {
49 | if (res.data.status === "errored") {
50 | return "errored";
51 | } else if (res.data.status === "done") {
52 | return "done";
53 | } else if (res.data.status === "queued") {
54 | return "queued";
55 | }
56 | }
57 |
58 | return "notStarted";
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/src/blob.ts:
--------------------------------------------------------------------------------
1 | export enum Store {
2 | S3 = "s3",
3 | AZURE_BLOB = "azure",
4 | GOOGLE_BLOB = "gcp",
5 | } // these are also protocol prefixes
6 |
7 | export interface AWSCreds {
8 | accessKeyId: string;
9 | secretAccessKey: string;
10 | region: string;
11 | type: Store.S3;
12 | }
13 |
14 | export interface AzureCreds {
15 | accountName: string;
16 | accountKey: string;
17 | type: Store.AZURE_BLOB;
18 | }
19 |
20 | export interface GCPCreds {
21 | json: string;
22 | type: Store.GOOGLE_BLOB;
23 | }
24 |
25 | export type Credentials = AWSCreds | AzureCreds | GCPCreds;
26 |
27 | export class Blob {
28 | blobStore: Store;
29 | blobBucket: string;
30 | blobKey: string;
31 | awsCreds?: AWSCreds;
32 | azureCreds?: AzureCreds;
33 | gcpCreds?: GCPCreds;
34 |
35 | constructor(creds: Credentials, bucket: string, key: string) {
36 | if (creds.type === Store.S3) {
37 | this.awsCreds = creds;
38 | this.blobStore = creds.type;
39 | } else if (creds.type === Store.AZURE_BLOB) {
40 | this.azureCreds = creds;
41 | this.blobStore = creds.type;
42 | } else if (creds.type === Store.GOOGLE_BLOB) {
43 | this.gcpCreds = creds;
44 | this.blobStore = creds.type;
45 | } else {
46 | throw new Error("Invalid Credential type");
47 | }
48 | this.blobBucket = bucket;
49 | this.blobKey = key;
50 | }
51 |
52 | toApiCredentials() {
53 | if (!!this.gcpCreds) {
54 | // special case, we want the JSON embedded
55 | return JSON.parse(this.gcpCreds.json);
56 | }
57 | const creds = this.awsCreds || this.azureCreds || undefined;
58 | const omitSingle = (key: string, { [key]: _, ...obj }) => obj;
59 | return omitSingle("type", creds);
60 | }
61 |
62 | toApiUrl(): string {
63 | const protocol = this.blobStore;
64 | const url = `${protocol}://${encodeURIComponent(
65 | this.blobBucket
66 | )}/${encodeURIComponent(this.blobKey)}`;
67 | return url;
68 | }
69 |
70 | toJSON() {
71 | const json: any = {
72 | store: this.blobStore,
73 | bucket: this.blobBucket,
74 | key: this.blobKey,
75 | };
76 |
77 | if (!!this.awsCreds) {
78 | delete this.awsCreds.type;
79 | json.awsCreds = this.awsCreds;
80 | }
81 |
82 | if (!!this.azureCreds) {
83 | delete this.azureCreds.type;
84 | json.azureCreds = this.azureCreds;
85 | }
86 |
87 | if (!!this.gcpCreds) {
88 | delete this.gcpCreds.type;
89 | json.gcpCreds = this.gcpCreds;
90 | }
91 |
92 | return json;
93 | }
94 | }
95 |
--------------------------------------------------------------------------------
/src/index.test.ts:
--------------------------------------------------------------------------------
1 | import axios from "axios";
2 | import MockAdapter from "axios-mock-adapter";
3 | import { MediaMachine } from "./index";
4 |
5 | const FAKE_SUCCESS_URL = "http://mediamachine.io/success";
6 | const FAKE_FAILURE_URL = "http://mediamachine.io/failure";
7 | const FAKE_INPUT_URL = "http://mediamachine.io/path/to/image.png";
8 | const FAKE_OUTPUT_URL = "http://mediamachine.io/path/to/output/image";
9 | const FAKE_AWS_REGION = "us-east-1";
10 | const FAKE_AWS_ACCESS_KEY_ID = "123";
11 | const FAKE_AWS_SECRET_ACCESS_KEY = "abc";
12 | const FAKE_AZURE_ACCOUNT_KEY = "azure-account-key";
13 | const FAKE_AZURE_ACCOUNT_NAME = "azure-account-name";
14 |
15 | describe("Mediamachine", () => {
16 | const API_KEY = "test-jest-123-test-c123a980-7173-11eb-8a10-1fc5d5c9c235";
17 | const BASE_URL = "https://api.mediamachine.io";
18 | let mediamachine;
19 |
20 | beforeEach(() => {
21 | mediamachine = new MediaMachine(API_KEY);
22 | });
23 |
24 | describe("mediamachine", () => {
25 | test("with a null apikey throws an error", () => {
26 | expect(() => new MediaMachine(null)).toThrow();
27 | });
28 |
29 | test("with empty string apikey throws an error", () => {
30 | expect(() => new MediaMachine("")).toThrow();
31 | });
32 |
33 | test("with apikey only using spaces throws an error", () => {
34 | expect(() => new MediaMachine(" ")).toThrow();
35 | });
36 | });
37 |
38 | describe("thumbnail", () => {
39 | let mock;
40 | const reqId = "42";
41 | const retData = { id: reqId, status: "queued", createdAt: new Date() };
42 | beforeEach(() => {
43 | mock = new MockAdapter(axios);
44 | });
45 |
46 | afterEach(() => {
47 | mock.reset();
48 | });
49 |
50 | test("with all required properties, using URLs does not fail", async () => {
51 | const expectedBody = {
52 | apiKey: API_KEY,
53 | successURL: FAKE_SUCCESS_URL,
54 | failureURL: FAKE_FAILURE_URL,
55 | inputURL: FAKE_INPUT_URL,
56 | outputURL: FAKE_OUTPUT_URL,
57 | width: "150",
58 | };
59 |
60 | mock.onPost(`${BASE_URL}/thumbnail`, expectedBody).reply(201, retData);
61 |
62 | const job = await mediamachine
63 | .thumbnail({
64 | successUrl: FAKE_SUCCESS_URL,
65 | failureUrl: FAKE_FAILURE_URL,
66 | width: 150,
67 | })
68 | .fromUrl(FAKE_INPUT_URL)
69 | .toUrl(FAKE_OUTPUT_URL);
70 |
71 | expect(job.reqId).toEqual(reqId);
72 | });
73 |
74 | test("with all required properties, using a text watermark", async () => {
75 | const expectedBody = {
76 | apiKey: API_KEY,
77 | successURL: FAKE_SUCCESS_URL,
78 | failureURL: FAKE_FAILURE_URL,
79 | inputURL: FAKE_INPUT_URL,
80 | outputURL: FAKE_OUTPUT_URL,
81 | watermark: {
82 | text: "mediamachine.io",
83 | fontSize: "12",
84 | fontColor: "white",
85 | opacity: "0.9",
86 | position: "bottomRight",
87 | },
88 | width: "150",
89 | };
90 | mock.onPost(`${BASE_URL}/thumbnail`, expectedBody).reply(201, retData);
91 |
92 | const job = await mediamachine
93 | .thumbnail({
94 | successUrl: FAKE_SUCCESS_URL,
95 | failureUrl: FAKE_FAILURE_URL,
96 | width: 150,
97 | watermark: mediamachine.textWatermark("mediamachine.io"),
98 | })
99 | .fromUrl(FAKE_INPUT_URL)
100 | .toUrl(FAKE_OUTPUT_URL);
101 |
102 | expect(job.reqId).toEqual(reqId);
103 | });
104 |
105 | test("with all required properties, using an image watermark", async () => {
106 | const expectedBody = {
107 | apiKey: API_KEY,
108 | successURL: FAKE_SUCCESS_URL,
109 | failureURL: FAKE_FAILURE_URL,
110 | inputURL: FAKE_INPUT_URL,
111 | outputURL: FAKE_OUTPUT_URL,
112 | width: "150",
113 | watermark: {
114 | width: 400,
115 | height: 200,
116 | imageUrl: "http://path.com/to/your/image",
117 | opacity: "0.9",
118 | position: "bottomRight",
119 | },
120 | };
121 | mock.onPost(`${BASE_URL}/thumbnail`, expectedBody).reply(201, retData);
122 |
123 | const job = await mediamachine
124 | .thumbnail({
125 | successUrl: FAKE_SUCCESS_URL,
126 | failureUrl: FAKE_FAILURE_URL,
127 | width: 150,
128 | watermark: mediamachine.imageWatermark({
129 | height: 200,
130 | width: 400,
131 | url: "http://path.com/to/your/image",
132 | }),
133 | })
134 | .fromUrl(FAKE_INPUT_URL)
135 | .toUrl(FAKE_OUTPUT_URL);
136 | expect(job.reqId).toEqual(reqId);
137 | });
138 |
139 | test("with all required properties, and no width", async () => {
140 | const expectedBody = {
141 | apiKey: API_KEY,
142 | successURL: FAKE_SUCCESS_URL,
143 | failureURL: FAKE_FAILURE_URL,
144 | inputURL: FAKE_INPUT_URL,
145 | outputURL: FAKE_OUTPUT_URL,
146 | width: "720",
147 | };
148 |
149 | mock.onPost(`${BASE_URL}/thumbnail`, expectedBody).reply(201, retData);
150 |
151 | const job = await mediamachine
152 | .thumbnail({
153 | successUrl: FAKE_SUCCESS_URL,
154 | failureUrl: FAKE_FAILURE_URL,
155 | })
156 | .fromUrl(FAKE_INPUT_URL)
157 | .toUrl(FAKE_OUTPUT_URL);
158 | expect(job.reqId).toEqual(reqId);
159 | });
160 |
161 | test("with all required properties, using AWS for input does not fail", async () => {
162 | const expectedBody = {
163 | apiKey: API_KEY,
164 | inputCreds: {
165 | region: FAKE_AWS_REGION,
166 | accessKeyId: FAKE_AWS_ACCESS_KEY_ID,
167 | secretAccessKey: FAKE_AWS_SECRET_ACCESS_KEY,
168 | },
169 | successURL: FAKE_SUCCESS_URL,
170 | failureURL: FAKE_FAILURE_URL,
171 | inputURL: "s3://test-bucket/test-key",
172 | outputURL: FAKE_OUTPUT_URL,
173 | width: "150",
174 | };
175 |
176 | mock.onPost(`${BASE_URL}/thumbnail`, expectedBody).reply(201, retData);
177 |
178 | const job = await mediamachine
179 | .thumbnail({
180 | successUrl: FAKE_SUCCESS_URL,
181 | failureUrl: FAKE_FAILURE_URL,
182 | width: 150,
183 | })
184 | .fromS3(
185 | FAKE_AWS_REGION,
186 | FAKE_AWS_ACCESS_KEY_ID,
187 | FAKE_AWS_SECRET_ACCESS_KEY,
188 | "test-bucket",
189 | "test-key"
190 | )
191 | .toUrl(FAKE_OUTPUT_URL);
192 | expect(job.reqId).toEqual(reqId);
193 | });
194 |
195 | test("with all required properties using Azure for input does not fail", async () => {
196 | const expectedBody = {
197 | apiKey: API_KEY,
198 | successURL: FAKE_SUCCESS_URL,
199 | failureURL: FAKE_FAILURE_URL,
200 | inputURL: "azure://test-bucket/test-key",
201 | inputCreds: {
202 | accountName: FAKE_AZURE_ACCOUNT_NAME,
203 | accountKey: FAKE_AZURE_ACCOUNT_KEY,
204 | },
205 | outputURL: FAKE_OUTPUT_URL,
206 | width: "150",
207 | };
208 | mock.onPost(`${BASE_URL}/thumbnail`, expectedBody).reply(201, retData);
209 |
210 | const job = await mediamachine
211 | .thumbnail({
212 | successUrl: FAKE_SUCCESS_URL,
213 | failureUrl: FAKE_FAILURE_URL,
214 | width: 150,
215 | })
216 | .fromAzure(
217 | FAKE_AZURE_ACCOUNT_KEY,
218 | FAKE_AZURE_ACCOUNT_NAME,
219 | "test-bucket",
220 | "test-key"
221 | )
222 | .toUrl(FAKE_OUTPUT_URL);
223 | expect(job.reqId).toEqual(reqId);
224 | });
225 |
226 | test("with all required properties using GCP for input does not fail", async () => {
227 | const expectedBody = {
228 | apiKey: API_KEY,
229 | successURL: FAKE_SUCCESS_URL,
230 | failureURL: FAKE_FAILURE_URL,
231 | inputURL: "gcp://test-bucket/test-key",
232 | inputCreds: {},
233 | outputURL: FAKE_OUTPUT_URL,
234 | width: "150",
235 | };
236 |
237 | mock.onPost(`${BASE_URL}/thumbnail`, expectedBody).reply(201, retData);
238 |
239 | const job = await mediamachine
240 | .thumbnail({
241 | successUrl: FAKE_SUCCESS_URL,
242 | failureUrl: FAKE_FAILURE_URL,
243 | width: 150,
244 | })
245 | .fromGCloud("{}", "test-bucket", "test-key")
246 | .toUrl(FAKE_OUTPUT_URL);
247 | expect(job.reqId).toEqual(reqId);
248 | });
249 |
250 | test("with all required properties using AWS for output does not fail", async () => {
251 | const expectedBody = {
252 | apiKey: API_KEY,
253 | successURL: FAKE_SUCCESS_URL,
254 | failureURL: FAKE_FAILURE_URL,
255 | inputURL: FAKE_INPUT_URL,
256 | outputURL: "s3://test-bucket/test-key",
257 | outputCreds: {
258 | region: FAKE_AWS_REGION,
259 | accessKeyId: FAKE_AWS_ACCESS_KEY_ID,
260 | secretAccessKey: FAKE_AWS_SECRET_ACCESS_KEY,
261 | },
262 | width: "150",
263 | };
264 |
265 | mock.onPost(`${BASE_URL}/thumbnail`, expectedBody).reply(201, retData);
266 |
267 | const job = await mediamachine
268 | .thumbnail({
269 | successUrl: FAKE_SUCCESS_URL,
270 | failureUrl: FAKE_FAILURE_URL,
271 | width: 150,
272 | })
273 | .fromUrl(FAKE_INPUT_URL)
274 | .toS3(
275 | FAKE_AWS_REGION,
276 | FAKE_AWS_ACCESS_KEY_ID,
277 | FAKE_AWS_SECRET_ACCESS_KEY,
278 | "test-bucket",
279 | "test-key"
280 | );
281 | expect(job.reqId).toEqual(reqId);
282 | });
283 |
284 | test("with all required properties using Azure for output does not fail", async () => {
285 | const expectedBody = {
286 | apiKey: API_KEY,
287 | outputCreds: {
288 | accountKey: FAKE_AZURE_ACCOUNT_KEY,
289 | accountName: FAKE_AZURE_ACCOUNT_NAME,
290 | },
291 | successURL: FAKE_SUCCESS_URL,
292 | failureURL: FAKE_FAILURE_URL,
293 | inputURL: FAKE_INPUT_URL,
294 | outputURL: "azure://test-bucket/test-key",
295 | width: "150",
296 | };
297 |
298 | mock.onPost(`${BASE_URL}/thumbnail`, expectedBody).reply(201, retData);
299 |
300 | const job = await mediamachine
301 | .thumbnail({
302 | successUrl: FAKE_SUCCESS_URL,
303 | failureUrl: FAKE_FAILURE_URL,
304 | width: 150,
305 | })
306 | .fromUrl(FAKE_INPUT_URL)
307 | .toAzure(
308 | FAKE_AZURE_ACCOUNT_KEY,
309 | FAKE_AZURE_ACCOUNT_NAME,
310 | "test-bucket",
311 | "test-key"
312 | );
313 | expect(job.reqId).toEqual(reqId);
314 | });
315 |
316 | test("with all required properties using GCP for output does not fail", async () => {
317 | const expectedBody = {
318 | apiKey: API_KEY,
319 | successURL: FAKE_SUCCESS_URL,
320 | failureURL: FAKE_FAILURE_URL,
321 | inputURL: FAKE_INPUT_URL,
322 | outputURL: "gcp://test-bucket/test-key",
323 | outputCreds: {},
324 | width: "150",
325 | };
326 | mock.onPost(`${BASE_URL}/thumbnail`, expectedBody).reply(201, retData);
327 |
328 | const job = await mediamachine
329 | .thumbnail({
330 | successUrl: FAKE_SUCCESS_URL,
331 | failureUrl: FAKE_FAILURE_URL,
332 | width: 150,
333 | })
334 | .fromUrl(FAKE_INPUT_URL)
335 | .toGCloud("{}", "test-bucket", "test-key");
336 | expect(job.reqId).toEqual(reqId);
337 | });
338 |
339 | test("with all required properties with Blob for input and output does not fail", async () => {
340 | const expectedBody = {
341 | apiKey: API_KEY,
342 | successURL: FAKE_SUCCESS_URL,
343 | failureURL: FAKE_FAILURE_URL,
344 | inputURL: "s3://test-bucket/test-key",
345 | outputURL: "gcp://test-bucket/test-key",
346 | inputCreds: {
347 | region: FAKE_AWS_REGION,
348 | accessKeyId: FAKE_AWS_ACCESS_KEY_ID,
349 | secretAccessKey: FAKE_AWS_SECRET_ACCESS_KEY,
350 | },
351 | outputCreds: {},
352 | width: "150",
353 | };
354 |
355 | mock.onPost(`${BASE_URL}/thumbnail`, expectedBody).reply(201, retData);
356 |
357 | const job = await mediamachine
358 | .thumbnail({
359 | successUrl: FAKE_SUCCESS_URL,
360 | failureUrl: FAKE_FAILURE_URL,
361 | width: 150,
362 | })
363 | .fromS3(
364 | FAKE_AWS_REGION,
365 | FAKE_AWS_ACCESS_KEY_ID,
366 | FAKE_AWS_SECRET_ACCESS_KEY,
367 | "test-bucket",
368 | "test-key"
369 | )
370 | .toGCloud("{}", "test-bucket", "test-key");
371 | expect(job.reqId).toEqual(reqId);
372 | });
373 | });
374 |
375 | describe("Gif Summary", () => {
376 | let mock;
377 | const reqId = "42";
378 | const retData = { id: reqId, status: "queued", createdAt: new Date() };
379 | beforeEach(() => {
380 | mock = new MockAdapter(axios);
381 | });
382 |
383 | afterEach(() => {
384 | mock.reset();
385 | });
386 |
387 | test("with all required properties does not fail", async () => {
388 | const expectedBody = {
389 | apiKey: API_KEY,
390 | successURL: FAKE_SUCCESS_URL,
391 | failureURL: FAKE_FAILURE_URL,
392 | inputURL: FAKE_INPUT_URL,
393 | outputURL: FAKE_OUTPUT_URL,
394 | width: "150",
395 | };
396 |
397 | mock.onPost(`${BASE_URL}/summary/gif`, expectedBody).reply(201, retData);
398 |
399 | const job = await mediamachine
400 | .summary({
401 | format: "gif",
402 | successUrl: FAKE_SUCCESS_URL,
403 | failureUrl: FAKE_FAILURE_URL,
404 | width: 150,
405 | })
406 | .fromUrl(FAKE_INPUT_URL)
407 | .toUrl(FAKE_OUTPUT_URL);
408 | expect(job.reqId).toEqual(reqId);
409 | });
410 |
411 | test("with all required properties, using a text watermark", async () => {
412 | const expectedBody = {
413 | apiKey: API_KEY,
414 | successURL: FAKE_SUCCESS_URL,
415 | failureURL: FAKE_FAILURE_URL,
416 | inputURL: FAKE_INPUT_URL,
417 | outputURL: FAKE_OUTPUT_URL,
418 | watermark: {
419 | text: "mediamachine.io",
420 | fontSize: "12",
421 | fontColor: "white",
422 | opacity: "0.9",
423 | position: "bottomRight",
424 | },
425 | width: "150",
426 | };
427 |
428 | mock.onPost(`${BASE_URL}/summary/gif`, expectedBody).reply(201, retData);
429 |
430 | const job = await mediamachine
431 | .summary({
432 | format: "gif",
433 | successUrl: FAKE_SUCCESS_URL,
434 | failureUrl: FAKE_FAILURE_URL,
435 | width: 150,
436 | watermark: mediamachine.textWatermark("mediamachine.io"),
437 | })
438 | .fromUrl(FAKE_INPUT_URL)
439 | .toUrl(FAKE_OUTPUT_URL);
440 | expect(job.reqId).toEqual(reqId);
441 | });
442 |
443 | test("with all required properties, using an image watermark", async () => {
444 | const expectedBody = {
445 | apiKey: API_KEY,
446 | successURL: FAKE_SUCCESS_URL,
447 | failureURL: FAKE_FAILURE_URL,
448 | inputURL: FAKE_INPUT_URL,
449 | outputURL: FAKE_OUTPUT_URL,
450 | watermark: {
451 | imageUrl: "http://path.com/to/your/image",
452 | width: 400,
453 | height: 200,
454 | opacity: "0.9",
455 | position: "bottomRight",
456 | },
457 | width: "150",
458 | };
459 |
460 | mock.onPost(`${BASE_URL}/summary/gif`, expectedBody).reply(201, retData);
461 |
462 | const job = await mediamachine
463 | .summary({
464 | format: "gif",
465 | successUrl: FAKE_SUCCESS_URL,
466 | failureUrl: FAKE_FAILURE_URL,
467 | width: 150,
468 | watermark: mediamachine.imageWatermark({
469 | url: "http://path.com/to/your/image",
470 | width: 400,
471 | height: 200,
472 | }),
473 | })
474 | .fromUrl(FAKE_INPUT_URL)
475 | .toUrl(FAKE_OUTPUT_URL);
476 | expect(job.reqId).toEqual(reqId);
477 | });
478 |
479 | test("with all required properties, and no width", async () => {
480 | const expectedBody = {
481 | apiKey: API_KEY,
482 | successURL: FAKE_SUCCESS_URL,
483 | failureURL: FAKE_FAILURE_URL,
484 | inputURL: FAKE_INPUT_URL,
485 | outputURL: FAKE_OUTPUT_URL,
486 | width: "720",
487 | };
488 |
489 | mock.onPost(`${BASE_URL}/summary/gif`, expectedBody).reply(201, retData);
490 |
491 | const job = await mediamachine
492 | .summary({
493 | format: "gif",
494 | successUrl: FAKE_SUCCESS_URL,
495 | failureUrl: FAKE_FAILURE_URL,
496 | })
497 | .fromUrl(FAKE_INPUT_URL)
498 | .toUrl(FAKE_OUTPUT_URL);
499 | expect(job.reqId).toEqual(reqId);
500 | });
501 |
502 | test("with all required properties, using AWS for input does not fail", async () => {
503 | const expectedBody = {
504 | apiKey: API_KEY,
505 | successURL: FAKE_SUCCESS_URL,
506 | failureURL: FAKE_FAILURE_URL,
507 | inputURL: "s3://test-bucket/test-key",
508 | inputCreds: {
509 | region: FAKE_AWS_REGION,
510 | accessKeyId: FAKE_AWS_ACCESS_KEY_ID,
511 | secretAccessKey: FAKE_AWS_SECRET_ACCESS_KEY,
512 | },
513 | outputURL: FAKE_OUTPUT_URL,
514 | width: "150",
515 | };
516 | mock.onPost(`${BASE_URL}/summary/gif`, expectedBody).reply(201, retData);
517 |
518 | const job = await mediamachine
519 | .summary({
520 | format: "gif",
521 | successUrl: FAKE_SUCCESS_URL,
522 | failureUrl: FAKE_FAILURE_URL,
523 | width: 150,
524 | })
525 | .fromS3(
526 | FAKE_AWS_REGION,
527 | FAKE_AWS_ACCESS_KEY_ID,
528 | FAKE_AWS_SECRET_ACCESS_KEY,
529 | "test-bucket",
530 | "test-key"
531 | )
532 | .toUrl(FAKE_OUTPUT_URL);
533 | expect(job.reqId).toEqual(reqId);
534 | });
535 |
536 | test("with all required properties using Azure for input does not fail", async () => {
537 | const expectedBody = {
538 | apiKey: API_KEY,
539 | successURL: FAKE_SUCCESS_URL,
540 | failureURL: FAKE_FAILURE_URL,
541 | inputURL: "azure://test-bucket/test-key",
542 | inputCreds: {
543 | accountKey: FAKE_AZURE_ACCOUNT_KEY,
544 | accountName: FAKE_AZURE_ACCOUNT_NAME,
545 | },
546 | outputURL: FAKE_OUTPUT_URL,
547 | width: "150",
548 | };
549 |
550 | mock.onPost(`${BASE_URL}/summary/gif`, expectedBody).reply(201, retData);
551 |
552 | const job = await mediamachine
553 | .summary({
554 | format: "gif",
555 | successUrl: FAKE_SUCCESS_URL,
556 | failureUrl: FAKE_FAILURE_URL,
557 | width: 150,
558 | })
559 | .fromAzure(
560 | FAKE_AZURE_ACCOUNT_KEY,
561 | FAKE_AZURE_ACCOUNT_NAME,
562 | "test-bucket",
563 | "test-key"
564 | )
565 | .toUrl(FAKE_OUTPUT_URL);
566 | expect(job.reqId).toEqual(reqId);
567 | });
568 |
569 | test("with all required properties using GCP for input does not fail", async () => {
570 | const expectedBody = {
571 | apiKey: API_KEY,
572 | successURL: FAKE_SUCCESS_URL,
573 | failureURL: FAKE_FAILURE_URL,
574 | inputURL: "gcp://test-bucket/test-key",
575 | inputCreds: {},
576 | outputURL: FAKE_OUTPUT_URL,
577 | width: "150",
578 | };
579 |
580 | mock.onPost(`${BASE_URL}/summary/gif`, expectedBody).reply(201, retData);
581 |
582 | const job = await mediamachine
583 | .summary({
584 | format: "gif",
585 | successUrl: FAKE_SUCCESS_URL,
586 | failureUrl: FAKE_FAILURE_URL,
587 | width: 150,
588 | })
589 | .fromGCloud("{}", "test-bucket", "test-key")
590 | .toUrl(FAKE_OUTPUT_URL);
591 | expect(job.reqId).toEqual(reqId);
592 | });
593 |
594 | test("with all required properties using Azure for output does not fail", async () => {
595 | const expectedBody = {
596 | apiKey: API_KEY,
597 | successURL: FAKE_SUCCESS_URL,
598 | failureURL: FAKE_FAILURE_URL,
599 | inputURL: FAKE_INPUT_URL,
600 | outputURL: "azure://test-bucket/test-key",
601 | outputCreds: {
602 | accountKey: FAKE_AZURE_ACCOUNT_KEY,
603 | accountName: FAKE_AZURE_ACCOUNT_NAME,
604 | },
605 | width: "150",
606 | };
607 |
608 | mock.onPost(`${BASE_URL}/summary/gif`, expectedBody).reply(201, retData);
609 |
610 | const job = await mediamachine
611 | .summary({
612 | format: "gif",
613 | successUrl: FAKE_SUCCESS_URL,
614 | failureUrl: FAKE_FAILURE_URL,
615 | width: 150,
616 | })
617 | .fromUrl(FAKE_INPUT_URL)
618 | .toAzure(
619 | FAKE_AZURE_ACCOUNT_KEY,
620 | FAKE_AZURE_ACCOUNT_NAME,
621 | "test-bucket",
622 | "test-key"
623 | );
624 | expect(job.reqId).toEqual(reqId);
625 | });
626 |
627 | test("with all required properties using GCP for output does not fail", async () => {
628 | const expectedBody = {
629 | apiKey: API_KEY,
630 | successURL: FAKE_SUCCESS_URL,
631 | failureURL: FAKE_FAILURE_URL,
632 | inputURL: FAKE_INPUT_URL,
633 | outputURL: "gcp://test-bucket/test-key",
634 | outputCreds: {},
635 | width: "150",
636 | };
637 |
638 | mock.onPost(`${BASE_URL}/summary/gif`, expectedBody).reply(201, retData);
639 |
640 | const job = await mediamachine
641 | .summary({
642 | format: "gif",
643 | successUrl: FAKE_SUCCESS_URL,
644 | failureUrl: FAKE_FAILURE_URL,
645 | width: "150",
646 | })
647 | .fromUrl(FAKE_INPUT_URL)
648 | .toGCloud("{}", "test-bucket", "test-key");
649 | expect(job.reqId).toEqual(reqId);
650 | });
651 |
652 | test("with all required properties with Blob for input and output does not fail", async () => {
653 | const expectedBody = {
654 | apiKey: API_KEY,
655 | successURL: FAKE_SUCCESS_URL,
656 | failureURL: FAKE_FAILURE_URL,
657 | inputURL: "s3://test-bucket/test-key",
658 | inputCreds: {
659 | region: FAKE_AWS_REGION,
660 | accessKeyId: FAKE_AWS_ACCESS_KEY_ID,
661 | secretAccessKey: FAKE_AWS_SECRET_ACCESS_KEY,
662 | },
663 | outputURL: "gcp://test-bucket/test-key",
664 | outputCreds: {},
665 | width: "150",
666 | };
667 |
668 | mock.onPost(`${BASE_URL}/summary/gif`, expectedBody).reply(201, retData);
669 |
670 | const job = await mediamachine
671 | .summary({
672 | format: "gif",
673 | successUrl: FAKE_SUCCESS_URL,
674 | failureUrl: FAKE_FAILURE_URL,
675 | width: 150,
676 | })
677 | .fromS3(
678 | FAKE_AWS_REGION,
679 | FAKE_AWS_ACCESS_KEY_ID,
680 | FAKE_AWS_SECRET_ACCESS_KEY,
681 | "test-bucket",
682 | "test-key"
683 | )
684 | .toGCloud("{}", "test-bucket", "test-key");
685 | expect(job.reqId).toEqual(reqId);
686 | });
687 |
688 | test("with null summaryType throws an error", () => {
689 | expect(() => {
690 | return mediamachine
691 | .summary({
692 | successUrl: FAKE_SUCCESS_URL,
693 | failureUrl: FAKE_FAILURE_URL,
694 | width: 150,
695 | })
696 | .fromUrl(FAKE_INPUT_URL)
697 | .toUrl(FAKE_OUTPUT_URL);
698 | }).rejects.toThrow();
699 | });
700 | });
701 |
702 | describe("MP4 Summary", () => {
703 | let mock;
704 | const reqId = "42";
705 | const retData = { id: reqId, status: "queued", createdAt: new Date() };
706 | beforeEach(() => {
707 | mock = new MockAdapter(axios);
708 | });
709 |
710 | afterEach(() => {
711 | mock.reset();
712 | });
713 |
714 | test("with all required properties does not fail", async () => {
715 | const expectedBody = {
716 | apiKey: API_KEY,
717 | successURL: FAKE_SUCCESS_URL,
718 | failureURL: FAKE_FAILURE_URL,
719 | inputURL: FAKE_INPUT_URL,
720 | outputURL: FAKE_OUTPUT_URL,
721 | width: "150",
722 | };
723 | mock.onPost(`${BASE_URL}/summary/mp4`, expectedBody).reply(201, retData);
724 |
725 | const job = await mediamachine
726 | .summary({
727 | format: "mp4",
728 | successUrl: FAKE_SUCCESS_URL,
729 | failureUrl: FAKE_FAILURE_URL,
730 | width: 150,
731 | })
732 | .fromUrl(FAKE_INPUT_URL)
733 | .toUrl(FAKE_OUTPUT_URL);
734 | expect(job.reqId).toEqual(reqId);
735 | });
736 |
737 | test("with all required properties, using a text watermark", async () => {
738 | const expectedBody = {
739 | apiKey: API_KEY,
740 | successURL: FAKE_SUCCESS_URL,
741 | failureURL: FAKE_FAILURE_URL,
742 | inputURL: FAKE_INPUT_URL,
743 | outputURL: FAKE_OUTPUT_URL,
744 | watermark: {
745 | text: "mediamachine.io",
746 | fontSize: "12",
747 | fontColor: "white",
748 | opacity: "0.9",
749 | position: "bottomRight",
750 | },
751 | width: "150",
752 | };
753 |
754 | mock.onPost(`${BASE_URL}/summary/mp4`, expectedBody).reply(201, retData);
755 |
756 | const job = await mediamachine
757 | .summary({
758 | format: "mp4",
759 | successUrl: FAKE_SUCCESS_URL,
760 | failureUrl: FAKE_FAILURE_URL,
761 | watermark: mediamachine.textWatermark("mediamachine.io"),
762 | width: 150,
763 | })
764 | .fromUrl(FAKE_INPUT_URL)
765 | .toUrl(FAKE_OUTPUT_URL);
766 | expect(job.reqId).toEqual(reqId);
767 | });
768 |
769 | test("with all required properties, using an image watermark", async () => {
770 | const expectedBody = {
771 | apiKey: API_KEY,
772 | successURL: FAKE_SUCCESS_URL,
773 | failureURL: FAKE_FAILURE_URL,
774 | inputURL: FAKE_INPUT_URL,
775 | outputURL: FAKE_OUTPUT_URL,
776 | watermark: {
777 | imageUrl: "http://path.com/to/your/image",
778 | width: 400,
779 | height: 200,
780 | opacity: "0.9",
781 | position: "bottomRight",
782 | },
783 | width: "150",
784 | };
785 |
786 | mock.onPost(`${BASE_URL}/summary/mp4`, expectedBody).reply(201, retData);
787 |
788 | const job = await mediamachine
789 | .summary({
790 | format: "mp4",
791 | successUrl: FAKE_SUCCESS_URL,
792 | failureUrl: FAKE_FAILURE_URL,
793 | width: 150,
794 | watermark: mediamachine.imageWatermark({
795 | url: "http://path.com/to/your/image",
796 | width: 400,
797 | height: 200,
798 | }),
799 | })
800 | .fromUrl(FAKE_INPUT_URL)
801 | .toUrl(FAKE_OUTPUT_URL);
802 | expect(job.reqId).toEqual(reqId);
803 | });
804 |
805 | test("with all required properties, and no width", async () => {
806 | const expectedBody = {
807 | apiKey: API_KEY,
808 | successURL: FAKE_SUCCESS_URL,
809 | failureURL: FAKE_FAILURE_URL,
810 | inputURL: FAKE_INPUT_URL,
811 | outputURL: FAKE_OUTPUT_URL,
812 | width: "720",
813 | };
814 |
815 | mock.onPost(`${BASE_URL}/summary/mp4`, expectedBody).reply(201, retData);
816 |
817 | const job = await mediamachine
818 | .summary({
819 | format: "mp4",
820 | successUrl: FAKE_SUCCESS_URL,
821 | failureUrl: FAKE_FAILURE_URL,
822 | })
823 | .fromUrl(FAKE_INPUT_URL)
824 | .toUrl(FAKE_OUTPUT_URL);
825 | expect(job.reqId).toEqual(reqId);
826 | });
827 |
828 | test("with all required properties, using AWS for input does not fail", async () => {
829 | const expectedBody = {
830 | apiKey: API_KEY,
831 | successURL: FAKE_SUCCESS_URL,
832 | failureURL: FAKE_FAILURE_URL,
833 | inputURL: "s3://test-bucket/test-key",
834 | inputCreds: {
835 | region: FAKE_AWS_REGION,
836 | accessKeyId: FAKE_AWS_ACCESS_KEY_ID,
837 | secretAccessKey: FAKE_AWS_SECRET_ACCESS_KEY,
838 | },
839 | outputURL: FAKE_OUTPUT_URL,
840 | width: "150",
841 | };
842 |
843 | mock.onPost(`${BASE_URL}/summary/mp4`, expectedBody).reply(201, retData);
844 |
845 | const job = await mediamachine
846 | .summary({
847 | format: "mp4",
848 | successUrl: FAKE_SUCCESS_URL,
849 | failureUrl: FAKE_FAILURE_URL,
850 | width: 150,
851 | })
852 | .fromS3(
853 | FAKE_AWS_REGION,
854 | FAKE_AWS_ACCESS_KEY_ID,
855 | FAKE_AWS_SECRET_ACCESS_KEY,
856 | "test-bucket",
857 | "test-key"
858 | )
859 | .toUrl(FAKE_OUTPUT_URL);
860 | expect(job.reqId).toEqual(reqId);
861 | });
862 |
863 | test("with all required properties using Azure for input does not fail", async () => {
864 | const expectedBody = {
865 | apiKey: API_KEY,
866 | successURL: FAKE_SUCCESS_URL,
867 | failureURL: FAKE_FAILURE_URL,
868 | inputURL: "azure://test-bucket/test-key",
869 | inputCreds: {
870 | accountKey: FAKE_AZURE_ACCOUNT_KEY,
871 | accountName: FAKE_AZURE_ACCOUNT_NAME,
872 | },
873 | outputURL: FAKE_OUTPUT_URL,
874 | width: "150",
875 | };
876 |
877 | mock.onPost(`${BASE_URL}/summary/mp4`, expectedBody).reply(201, retData);
878 | const job = await mediamachine
879 | .summary({
880 | format: "mp4",
881 | successUrl: FAKE_SUCCESS_URL,
882 | failureUrl: FAKE_FAILURE_URL,
883 | width: 150,
884 | })
885 | .fromAzure(
886 | FAKE_AZURE_ACCOUNT_KEY,
887 | FAKE_AZURE_ACCOUNT_NAME,
888 | "test-bucket",
889 | "test-key"
890 | )
891 | .toUrl(FAKE_OUTPUT_URL);
892 | expect(job.reqId).toEqual(reqId);
893 | });
894 |
895 | test("with all required properties using GCP for input does not fail", async () => {
896 | const expectedBody = {
897 | apiKey: API_KEY,
898 | successURL: FAKE_SUCCESS_URL,
899 | failureURL: FAKE_FAILURE_URL,
900 | inputURL: "gcp://test-bucket/test-key",
901 | inputCreds: {},
902 | outputURL: FAKE_OUTPUT_URL,
903 | width: "150",
904 | };
905 |
906 | mock.onPost(`${BASE_URL}/summary/mp4`, expectedBody).reply(201, retData);
907 |
908 | const job = await mediamachine
909 | .summary({
910 | format: "mp4",
911 | successUrl: FAKE_SUCCESS_URL,
912 | failureUrl: FAKE_FAILURE_URL,
913 | width: 150,
914 | })
915 | .fromGCloud("{}", "test-bucket", "test-key")
916 | .toUrl(FAKE_OUTPUT_URL);
917 | expect(job.reqId).toEqual(reqId);
918 | });
919 |
920 | test("with all required properties using Azure for output does not fail", async () => {
921 | const expectedBody = {
922 | apiKey: API_KEY,
923 | successURL: FAKE_SUCCESS_URL,
924 | failureURL: FAKE_FAILURE_URL,
925 | inputURL: FAKE_INPUT_URL,
926 | outputURL: "azure://test-bucket/test-key",
927 | outputCreds: {
928 | accountKey: FAKE_AZURE_ACCOUNT_KEY,
929 | accountName: FAKE_AZURE_ACCOUNT_NAME,
930 | },
931 | width: "150",
932 | };
933 |
934 | mock.onPost(`${BASE_URL}/summary/mp4`, expectedBody).reply(201, retData);
935 |
936 | const job = await mediamachine
937 | .summary({
938 | format: "mp4",
939 | successUrl: FAKE_SUCCESS_URL,
940 | failureUrl: FAKE_FAILURE_URL,
941 | width: 150,
942 | })
943 | .fromUrl(FAKE_INPUT_URL)
944 | .toAzure(
945 | FAKE_AZURE_ACCOUNT_KEY,
946 | FAKE_AZURE_ACCOUNT_NAME,
947 | "test-bucket",
948 | "test-key"
949 | );
950 | expect(job.reqId).toEqual(reqId);
951 | });
952 |
953 | test("with all required properties using GCP for output does not fail", async () => {
954 | const expectedBody = {
955 | apiKey: API_KEY,
956 | successURL: FAKE_SUCCESS_URL,
957 | failureURL: FAKE_FAILURE_URL,
958 | inputURL: FAKE_INPUT_URL,
959 | outputURL: "gcp://test-bucket/test-key",
960 | outputCreds: {},
961 | width: "150",
962 | };
963 |
964 | mock.onPost(`${BASE_URL}/summary/mp4`, expectedBody).reply(201, retData);
965 |
966 | const job = await mediamachine
967 | .summary({
968 | format: "mp4",
969 | failureUrl: FAKE_FAILURE_URL,
970 | successUrl: FAKE_SUCCESS_URL,
971 | width: 150,
972 | })
973 | .fromUrl(FAKE_INPUT_URL)
974 | .toGCloud("{}", "test-bucket", "test-key");
975 | expect(job.reqId).toEqual(reqId);
976 | });
977 |
978 | test("with all required properties with Blob for input and output does not fail", async () => {
979 | const expectedBody = {
980 | apiKey: API_KEY,
981 | successURL: FAKE_SUCCESS_URL,
982 | failureURL: FAKE_FAILURE_URL,
983 | inputURL: "s3://test-bucket/test-key",
984 | inputCreds: {
985 | region: FAKE_AWS_REGION,
986 | accessKeyId: FAKE_AWS_ACCESS_KEY_ID,
987 | secretAccessKey: FAKE_AWS_SECRET_ACCESS_KEY,
988 | },
989 | outputURL: "gcp://test-bucket/test-key",
990 | outputCreds: {},
991 | width: "150",
992 | };
993 |
994 | mock.onPost(`${BASE_URL}/summary/mp4`, expectedBody).reply(201, retData);
995 |
996 | const job = await mediamachine
997 | .summary({
998 | format: "mp4",
999 | successUrl: FAKE_SUCCESS_URL,
1000 | failureUrl: FAKE_FAILURE_URL,
1001 | width: 150,
1002 | })
1003 | .fromS3(
1004 | FAKE_AWS_REGION,
1005 | FAKE_AWS_ACCESS_KEY_ID,
1006 | FAKE_AWS_SECRET_ACCESS_KEY,
1007 | "test-bucket",
1008 | "test-key"
1009 | )
1010 | .toGCloud("{}", "test-bucket", "test-key");
1011 | expect(job.reqId).toEqual(reqId);
1012 | });
1013 | });
1014 |
1015 | describe("Transcode", () => {
1016 | let mock;
1017 |
1018 | const reqId = "42";
1019 | const retData = { id: reqId, status: "queued", createdAt: new Date() };
1020 | beforeEach(() => {
1021 | mock = new MockAdapter(axios);
1022 | });
1023 |
1024 | afterEach(() => {
1025 | mock.reset();
1026 | });
1027 |
1028 | test("with all required properties does not fail", async () => {
1029 | const expectedBody = {
1030 | apiKey: API_KEY,
1031 | successURL: FAKE_SUCCESS_URL,
1032 | failureURL: FAKE_FAILURE_URL,
1033 | inputURL: FAKE_INPUT_URL,
1034 | outputURL: FAKE_OUTPUT_URL,
1035 | width: "150",
1036 | height: "200",
1037 | encoder: "h264",
1038 | bitrateKBPS: "2000",
1039 | container: "mp4",
1040 | };
1041 |
1042 | mock.onPost(`${BASE_URL}/transcode`, expectedBody).reply(201, retData);
1043 |
1044 | const job = await mediamachine
1045 | .transcodeToMp4({
1046 | encoder: "h264",
1047 | width: 150,
1048 | height: 200,
1049 | failureUrl: FAKE_FAILURE_URL,
1050 | successUrl: FAKE_SUCCESS_URL,
1051 | })
1052 | .fromUrl(FAKE_INPUT_URL)
1053 | .toUrl(FAKE_OUTPUT_URL);
1054 | expect(job.reqId).toEqual(reqId);
1055 | });
1056 |
1057 | test("with all required properties, using a text watermark", async () => {
1058 | const expectedBody = {
1059 | apiKey: API_KEY,
1060 | successURL: FAKE_SUCCESS_URL,
1061 | failureURL: FAKE_FAILURE_URL,
1062 | inputURL: FAKE_INPUT_URL,
1063 | outputURL: FAKE_OUTPUT_URL,
1064 | width: "150",
1065 | height: "200",
1066 | encoder: "h264",
1067 | bitrateKBPS: "2000",
1068 | container: "mp4",
1069 | watermark: {
1070 | text: "mediamachine.io",
1071 | fontSize: "12",
1072 | fontColor: "white",
1073 | opacity: "0.9",
1074 | position: "bottomRight",
1075 | },
1076 | };
1077 |
1078 | mock.onPost(`${BASE_URL}/transcode`, expectedBody).reply(201, retData);
1079 |
1080 | const job = await mediamachine
1081 | .transcodeToMp4({
1082 | width: 150,
1083 | height: 200,
1084 | successUrl: FAKE_SUCCESS_URL,
1085 | failureUrl: FAKE_FAILURE_URL,
1086 | watermark: mediamachine.textWatermark("mediamachine.io"),
1087 | })
1088 | .fromUrl(FAKE_INPUT_URL)
1089 | .toUrl(FAKE_OUTPUT_URL);
1090 | expect(job.reqId).toEqual(reqId);
1091 | });
1092 |
1093 | test("with all required properties, using an image watermark", async () => {
1094 | const expectedBody = {
1095 | apiKey: API_KEY,
1096 | successURL: FAKE_SUCCESS_URL,
1097 | failureURL: FAKE_FAILURE_URL,
1098 | inputURL: FAKE_INPUT_URL,
1099 | outputURL: FAKE_OUTPUT_URL,
1100 | width: "150",
1101 | height: "200",
1102 | watermark: {
1103 | imageUrl: "http://path.com/to/your/image",
1104 | width: 400,
1105 | height: 200,
1106 | opacity: "0.9",
1107 | position: "bottomRight",
1108 | },
1109 | encoder: "h264",
1110 | bitrateKBPS: "2000",
1111 | container: "mp4",
1112 | };
1113 |
1114 | mock.onPost(`${BASE_URL}/transcode`, expectedBody).reply(201, retData);
1115 |
1116 | const job = await mediamachine
1117 | .transcodeToMp4({
1118 | height: 200,
1119 | width: 150,
1120 | successUrl: FAKE_SUCCESS_URL,
1121 | failureUrl: FAKE_FAILURE_URL,
1122 | watermark: mediamachine.imageWatermark({
1123 | url: "http://path.com/to/your/image",
1124 | width: 400,
1125 | height: 200,
1126 | }),
1127 | })
1128 | .fromUrl(FAKE_INPUT_URL)
1129 | .toUrl(FAKE_OUTPUT_URL);
1130 | expect(job.reqId).toEqual(reqId);
1131 | });
1132 |
1133 | test("with all required properties, and no width", async () => {
1134 | const expectedBody = {
1135 | apiKey: API_KEY,
1136 | successURL: FAKE_SUCCESS_URL,
1137 | failureURL: FAKE_FAILURE_URL,
1138 | inputURL: FAKE_INPUT_URL,
1139 | outputURL: FAKE_OUTPUT_URL,
1140 | encoder: "h264",
1141 | bitrateKBPS: "2000",
1142 | container: "mp4",
1143 | width: "720",
1144 | };
1145 |
1146 | mock.onPost(`${BASE_URL}/transcode`, expectedBody).reply(201, retData);
1147 |
1148 | const job = await mediamachine
1149 | .transcodeToMp4({
1150 | successUrl: FAKE_SUCCESS_URL,
1151 | failureUrl: FAKE_FAILURE_URL,
1152 | })
1153 | .fromUrl(FAKE_INPUT_URL)
1154 | .toUrl(FAKE_OUTPUT_URL);
1155 | expect(job.reqId).toEqual(reqId);
1156 | });
1157 |
1158 | test("with all required properties, using AWS for input does not fail", async () => {
1159 | const expectedBody = {
1160 | apiKey: API_KEY,
1161 | successURL: FAKE_SUCCESS_URL,
1162 | failureURL: FAKE_FAILURE_URL,
1163 | inputURL: "s3://test-bucket/test-key",
1164 | inputCreds: {
1165 | region: FAKE_AWS_REGION,
1166 | accessKeyId: FAKE_AWS_ACCESS_KEY_ID,
1167 | secretAccessKey: FAKE_AWS_SECRET_ACCESS_KEY,
1168 | },
1169 | outputURL: FAKE_OUTPUT_URL,
1170 | width: "150",
1171 | encoder: "h264",
1172 | bitrateKBPS: "2000",
1173 | container: "mp4",
1174 | };
1175 |
1176 | mock.onPost(`${BASE_URL}/transcode`, expectedBody).reply(201, retData);
1177 |
1178 | const job = await mediamachine
1179 | .transcodeToMp4({
1180 | successUrl: FAKE_SUCCESS_URL,
1181 | failureUrl: FAKE_FAILURE_URL,
1182 | width: 150,
1183 | })
1184 | .fromS3(
1185 | FAKE_AWS_REGION,
1186 | FAKE_AWS_ACCESS_KEY_ID,
1187 | FAKE_AWS_SECRET_ACCESS_KEY,
1188 | "test-bucket",
1189 | "test-key"
1190 | )
1191 | .toUrl(FAKE_OUTPUT_URL);
1192 | expect(job.reqId).toEqual(reqId);
1193 | });
1194 |
1195 | test("with all required properties using Azure for input does not fail", async () => {
1196 | const expectedBody = {
1197 | apiKey: API_KEY,
1198 | successURL: FAKE_SUCCESS_URL,
1199 | failureURL: FAKE_FAILURE_URL,
1200 | inputURL: "azure://test-bucket/test-key",
1201 | inputCreds: {
1202 | accountKey: FAKE_AZURE_ACCOUNT_KEY,
1203 | accountName: FAKE_AZURE_ACCOUNT_NAME,
1204 | },
1205 | width: "150",
1206 | outputURL: FAKE_OUTPUT_URL,
1207 | encoder: "h264",
1208 | bitrateKBPS: "2000",
1209 | container: "mp4",
1210 | };
1211 |
1212 | mock.onPost(`${BASE_URL}/transcode`, expectedBody).reply(201, retData);
1213 |
1214 | const job = await mediamachine
1215 | .transcodeToMp4({
1216 | width: 150,
1217 | successUrl: FAKE_SUCCESS_URL,
1218 | failureUrl: FAKE_FAILURE_URL,
1219 | })
1220 | .fromAzure(
1221 | FAKE_AZURE_ACCOUNT_KEY,
1222 | FAKE_AZURE_ACCOUNT_NAME,
1223 | "test-bucket",
1224 | "test-key"
1225 | )
1226 | .toUrl(FAKE_OUTPUT_URL);
1227 | expect(job.reqId).toEqual(reqId);
1228 | });
1229 |
1230 | test("with all required properties using GCP for input does not fail", async () => {
1231 | const expectedBody = {
1232 | apiKey: API_KEY,
1233 | successURL: FAKE_SUCCESS_URL,
1234 | failureURL: FAKE_FAILURE_URL,
1235 | inputURL: "gcp://test-bucket/test-key",
1236 | inputCreds: {},
1237 | outputURL: FAKE_OUTPUT_URL,
1238 | encoder: "h264",
1239 | bitrateKBPS: "2000",
1240 | container: "mp4",
1241 | width: "150",
1242 | };
1243 |
1244 | mock.onPost(`${BASE_URL}/transcode`, expectedBody).reply(201, retData);
1245 |
1246 | const job = await mediamachine
1247 | .transcodeToMp4({
1248 | width: 150,
1249 | successUrl: FAKE_SUCCESS_URL,
1250 | failureUrl: FAKE_FAILURE_URL,
1251 | })
1252 | .fromGCloud("{}", "test-bucket", "test-key")
1253 | .toUrl(FAKE_OUTPUT_URL);
1254 | expect(job.reqId).toEqual(reqId);
1255 | });
1256 |
1257 | test("with all required properties using Azure for output does not fail", async () => {
1258 | const expectedBody = {
1259 | apiKey: API_KEY,
1260 | successURL: FAKE_SUCCESS_URL,
1261 | failureURL: FAKE_FAILURE_URL,
1262 | inputURL: FAKE_INPUT_URL,
1263 | outputURL: "azure://test-bucket/test-key",
1264 | outputCreds: {
1265 | accountKey: FAKE_AZURE_ACCOUNT_KEY,
1266 | accountName: FAKE_AZURE_ACCOUNT_NAME,
1267 | },
1268 | encoder: "h264",
1269 | bitrateKBPS: "2000",
1270 | container: "mp4",
1271 | width: "150",
1272 | };
1273 |
1274 | mock.onPost(`${BASE_URL}/transcode`, expectedBody).reply(201, retData);
1275 |
1276 | const job = await mediamachine
1277 | .transcodeToMp4({
1278 | width: 150,
1279 | successUrl: FAKE_SUCCESS_URL,
1280 | failureUrl: FAKE_FAILURE_URL,
1281 | })
1282 | .fromUrl(FAKE_INPUT_URL)
1283 | .toAzure(
1284 | FAKE_AZURE_ACCOUNT_KEY,
1285 | FAKE_AZURE_ACCOUNT_NAME,
1286 | "test-bucket",
1287 | "test-key"
1288 | );
1289 | expect(job.reqId).toEqual(reqId);
1290 | });
1291 |
1292 | test("with all required properties using GCP for output does not fail", async () => {
1293 | const expectedBody = {
1294 | apiKey: API_KEY,
1295 | successURL: FAKE_SUCCESS_URL,
1296 | failureURL: FAKE_FAILURE_URL,
1297 | inputURL: FAKE_INPUT_URL,
1298 | outputURL: "gcp://test-bucket/test-key",
1299 | outputCreds: {},
1300 | encoder: "h264",
1301 | bitrateKBPS: "2000",
1302 | container: "mp4",
1303 | width: "150",
1304 | };
1305 |
1306 | mock.onPost(`${BASE_URL}/transcode`, expectedBody).reply(201, retData);
1307 |
1308 | const job = await mediamachine
1309 | .transcodeToMp4({
1310 | width: 150,
1311 | failureUrl: FAKE_FAILURE_URL,
1312 | successUrl: FAKE_SUCCESS_URL,
1313 | })
1314 | .fromUrl(FAKE_INPUT_URL)
1315 | .toGCloud("{}", "test-bucket", "test-key");
1316 | expect(job.reqId).toEqual(reqId);
1317 | });
1318 |
1319 | test("with all required properties with Blob for input and output does not fail", async () => {
1320 | const expectedBody = {
1321 | apiKey: API_KEY,
1322 | successURL: FAKE_SUCCESS_URL,
1323 | failureURL: FAKE_FAILURE_URL,
1324 | inputURL: "s3://test-bucket/test-key",
1325 | inputCreds: {
1326 | region: FAKE_AWS_REGION,
1327 | accessKeyId: FAKE_AWS_ACCESS_KEY_ID,
1328 | secretAccessKey: FAKE_AWS_SECRET_ACCESS_KEY,
1329 | },
1330 | outputURL: "gcp://test-bucket/test-key",
1331 | outputCreds: {},
1332 | encoder: "h264",
1333 | bitrateKBPS: "2000",
1334 | container: "mp4",
1335 | width: "150",
1336 | };
1337 |
1338 | mock.onPost(`${BASE_URL}/transcode`, expectedBody).reply(201, retData);
1339 |
1340 | const job = await mediamachine
1341 | .transcodeToMp4({
1342 | width: 150,
1343 | successUrl: FAKE_SUCCESS_URL,
1344 | failureUrl: FAKE_FAILURE_URL,
1345 | })
1346 | .fromS3(
1347 | FAKE_AWS_REGION,
1348 | FAKE_AWS_ACCESS_KEY_ID,
1349 | FAKE_AWS_SECRET_ACCESS_KEY,
1350 | "test-bucket",
1351 | "test-key"
1352 | )
1353 | .toGCloud("{}", "test-bucket", "test-key");
1354 | expect(job.reqId).toEqual(reqId);
1355 | });
1356 | });
1357 | });
1358 |
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
1 | import { ThumbnailJob } from "./thumbnail";
2 | import { Blob } from "./blob";
3 | import { Container, Encoder, TranscodeJob, TranscodeOpts } from "./transcode";
4 | import { SummaryJob, SummaryType } from "./summary";
5 | import { parseApiKey } from "./utils";
6 | import { WorkerConfig } from "./WorkerConfig";
7 | import {
8 | ImageWatermark,
9 | ImageWatermarkOptions,
10 | TextWatermark,
11 | TextWatermarkOptions,
12 | Watermark,
13 | } from "./watermark";
14 | import { Executable } from "./Executable";
15 | import { WorkerTarget } from "./WorkerTarget";
16 |
17 | // mp4 transcoding
18 |
19 | class TranscodeMp4Target extends WorkerTarget {
20 | workerConfig: TranscodeJob;
21 |
22 | constructor(transcoder: TranscodeJob) {
23 | super(transcoder);
24 | }
25 | }
26 |
27 | interface TranscodeMp4Options {
28 | height?: number;
29 | width?: number;
30 | watermark?: Watermark;
31 | encoder?: Encoder;
32 | successUrl?: string;
33 | failureUrl?: string;
34 | }
35 |
36 | class TranscoderMp4 extends WorkerConfig {
37 | options: TranscodeMp4Options;
38 |
39 | constructor(apiKey: string, opts: TranscodeMp4Options) {
40 | super(apiKey, TranscodeMp4Target);
41 | this.options = opts;
42 | }
43 |
44 | getExecutable(fromConfig: string | Blob) {
45 | const opts = new TranscodeOpts();
46 | const options = this.options;
47 | opts.container(Container.MP4);
48 | if (options.encoder) {
49 | opts.encoder(options.encoder);
50 | }
51 |
52 | let config = new TranscodeJob(this.apiKey)
53 | .from(fromConfig)
54 | .webhooks({
55 | successUrl: options.successUrl,
56 | failureUrl: options.failureUrl,
57 | })
58 | .opts(opts);
59 |
60 | if (options.width) {
61 | config = config.width(options.width);
62 | }
63 | if (options.height) {
64 | config = config.height(options.height);
65 | }
66 |
67 | if (options.watermark) {
68 | config = config.watermark(options.watermark);
69 | }
70 | return config;
71 | }
72 | }
73 |
74 | // transcoding
75 |
76 | class TranscodeWebmTarget extends WorkerTarget {
77 | workerConfig: TranscodeJob;
78 |
79 | constructor(transcoder: TranscodeJob) {
80 | super(transcoder);
81 | }
82 | }
83 |
84 | interface TranscodeWebmOptions {
85 | height?: number;
86 | width?: number;
87 | watermark?: Watermark;
88 | encoder: "vp8" | "vp9";
89 | successUrl?: string;
90 | failureUrl?: string;
91 | }
92 |
93 | class TranscoderWebm extends WorkerConfig {
94 | options: TranscodeWebmOptions;
95 |
96 | constructor(apiKey: string, opts: TranscodeWebmOptions) {
97 | super(apiKey, TranscodeWebmTarget);
98 | this.options = opts;
99 | }
100 |
101 | getExecutable(fromConfig: string | Blob) {
102 | const opts = new TranscodeOpts();
103 | opts.container(Container.WEBM);
104 | const options = this.options;
105 | if (options.encoder) {
106 | opts.encoder(options.encoder);
107 | }
108 |
109 | let config = new TranscodeJob(this.apiKey)
110 | .from(fromConfig)
111 | .webhooks({
112 | successUrl: options.successUrl,
113 | failureUrl: options.failureUrl,
114 | })
115 | .opts(opts);
116 |
117 | if (options.width) {
118 | config = config.width(options.width);
119 | }
120 | if (options.height) {
121 | config = config.height(options.height);
122 | }
123 |
124 | if (options.watermark) {
125 | config = config.watermark(options.watermark);
126 | }
127 | return config;
128 | }
129 | }
130 |
131 | interface ThumbnailOptions {
132 | width?: number;
133 | watermarkText?: string;
134 | watermark?: Watermark;
135 | successUrl?: string;
136 | failureUrl?: string;
137 | }
138 |
139 | class ThumbnailTarget extends WorkerTarget {
140 | thumbnailer: Thumbnailer;
141 | inputBlob: Blob;
142 |
143 | constructor(thumbnailer: ThumbnailJob) {
144 | super(thumbnailer);
145 | }
146 | }
147 |
148 | class Thumbnailer extends WorkerConfig {
149 | options: ThumbnailOptions;
150 |
151 | constructor(apiKey: string, options: ThumbnailOptions) {
152 | super(apiKey, ThumbnailTarget);
153 | this.options = options;
154 | }
155 | getExecutable(fromConfig: string | Blob) {
156 | const options = this.options;
157 |
158 | let config = new ThumbnailJob(this.apiKey).from(fromConfig).webhooks({
159 | successUrl: options.successUrl,
160 | failureUrl: options.failureUrl,
161 | });
162 |
163 |     if (options.width) {
164 |       config = config.width(options.width);
165 |     }
166 |
167 | if (options.watermark) {
168 | config = config.watermark(options.watermark);
169 | }
170 |
171 | return config;
172 | }
173 | }
174 |
175 | interface SummaryOptions {
176 | width?: number;
177 | watermark?: Watermark;
178 | format?: SummaryType;
179 | removeAudio?: boolean;
180 | successUrl?: string;
181 | failureUrl?: string;
182 | }
183 |
184 | class SummaryTarget extends WorkerTarget {
185 | summarizer: Summarizer;
186 |
187 | constructor(summarizer: SummaryJob) {
188 | super(summarizer);
189 | }
190 | }
191 |
192 | class Summarizer extends WorkerConfig {
193 | options: SummaryOptions;
194 |
195 | constructor(apiKey: string, opts: SummaryOptions) {
196 | super(apiKey, SummaryTarget);
197 | this.options = opts;
198 | }
199 | getExecutable(fromConfig: string | Blob): Executable {
200 | const options = this.options;
201 |
202 | let config = new SummaryJob(this.apiKey).from(fromConfig).webhooks({
203 | successUrl: options.successUrl,
204 | failureUrl: options.failureUrl,
205 | });
206 |
207 |     if (options.width) {
208 |       config = config.width(options.width);
209 |     }
210 |
211 | config = config.type(options.format ? options.format : "gif");
212 |
213 | if (options.watermark) {
214 | config = config.watermark(options.watermark);
215 | }
216 |     // Only pass removeAudio through when the caller asked for it.
217 |     if (options.removeAudio) {
218 |       config = config.removeAudio(options.removeAudio);
219 |     }
220 | return config;
221 | }
222 | }
223 |
224 | // MediaMachine
225 | // ==============================
226 |
227 | export class MediaMachine {
228 | apiKey: string;
229 |
230 | constructor(apiKey: string) {
231 | parseApiKey(apiKey);
232 | this.apiKey = apiKey;
233 | }
234 |
235 | transcodeToWebm(opts: TranscodeWebmOptions): TranscoderWebm {
236 | return new TranscoderWebm(this.apiKey, opts);
237 | }
238 |
239 | transcodeToMp4(opts: TranscodeMp4Options): TranscoderMp4 {
240 | return new TranscoderMp4(this.apiKey, opts);
241 | }
242 |
243 | thumbnail(opts: ThumbnailOptions): Thumbnailer {
244 | return new Thumbnailer(this.apiKey, opts);
245 | }
246 |
247 | summary(opts: SummaryOptions): Summarizer {
248 | return new Summarizer(this.apiKey, opts);
249 | }
250 |
251 | textWatermark(text: string, opts: TextWatermarkOptions = {}): TextWatermark {
252 | return new TextWatermark(text, opts);
253 | }
254 | imageWatermark(opts: ImageWatermarkOptions = {}): ImageWatermark {
255 | return new ImageWatermark(opts);
256 | }
257 | }
258 |
259 | export { Job } from "./job";
260 |
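Usage sketch for the MediaMachine facade above. The API key, URLs, and width are placeholders; the fluent fromUrl/toUrl calls come from WorkerConfig/WorkerTarget and are the same ones exercised in src/index.test.ts.

import { MediaMachine } from "./index";

// Hypothetical test-mode key: "test-" prefix, a name, then a UUID, which is the shape parseApiKey expects.
const mediamachine = new MediaMachine("test-demo-01234567-89ab-cdef-0123-456789abcdef");

async function makeGifSummary(): Promise<string> {
  // Configure a GIF summary with a text watermark, then point it at input and output URLs.
  const job = await mediamachine
    .summary({
      format: "gif",
      width: 150,
      watermark: mediamachine.textWatermark("mediamachine.io"),
      successUrl: "https://example.com/success",
      failureUrl: "https://example.com/failure",
    })
    .fromUrl("https://example.com/input.mp4")
    .toUrl("https://example.com/output.gif");
  return job.reqId;
}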
--------------------------------------------------------------------------------
/src/job.ts:
--------------------------------------------------------------------------------
1 | import { API, JobStatus } from "./api";
2 |
3 | export class Job {
4 | reqId: string;
5 |
6 | constructor(reqId: string) {
7 | this.reqId = reqId;
8 | }
9 |
10 |   async status(): Promise<JobStatus> {
11 | return await API.jobStatus(this.reqId);
12 | }
13 | }
14 |
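A small polling sketch on top of Job.status(). The shape of the resolved value is whatever API.jobStatus (src/api.ts) returns, so it is only logged here; the interval and attempt count are arbitrary.

import { Job } from "./job";

// Check a job's status a few times, pausing between requests.
async function pollJob(reqId: string, attempts = 5): Promise<void> {
  const job = new Job(reqId);
  for (let i = 0; i < attempts; i++) {
    const status = await job.status();
    console.log(`attempt ${i + 1}:`, status);
    await new Promise((resolve) => setTimeout(resolve, 3000));
  }
}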
--------------------------------------------------------------------------------
/src/summary.ts:
--------------------------------------------------------------------------------
1 | import { API } from "./api";
2 | import { removeUndefinedFromObj } from "./utils";
3 | import { Blob } from "./blob";
4 | import { Job } from "./job";
5 | import { Webhooks } from "./webhooks";
6 | import { Executable } from "./Executable";
7 | import { Watermark } from "./watermark";
8 |
9 | export type SummaryType = "mp4" | "gif";
10 |
11 | export class SummaryJob implements Executable {
12 | apiKey: string;
13 | successUrl?: string;
14 | failureUrl?: string;
15 | inputUrl?: string;
16 | inputBlob?: Blob;
17 | outputUrl?: string;
18 | outputBlob?: Blob;
19 | summaryWidth?: number;
20 | summaryWatermark?: Watermark;
21 | summaryType: SummaryType;
22 | summaryRemoveAudio?: boolean;
23 |
24 | constructor(apikey: string) {
25 | this.summaryWidth = 720;
26 | this.apiKey = apikey;
27 | }
28 |
29 | webhooks(webhooks: Webhooks): SummaryJob {
30 | this.successUrl = webhooks.successUrl;
31 | this.failureUrl = webhooks.failureUrl;
32 | return this;
33 | }
34 |
35 | from(source: string | Blob): SummaryJob {
36 | if (typeof source === "string") {
37 | this.inputUrl = source;
38 | } else {
39 | this.inputBlob = source;
40 | }
41 |
42 | return this;
43 | }
44 |
45 | to(destination: string | Blob): SummaryJob {
46 | if (typeof destination === "string") {
47 | this.outputUrl = destination;
48 | } else {
49 | this.outputBlob = destination;
50 | }
51 | return this;
52 | }
53 |
54 | watermark(watermark: Watermark): SummaryJob {
55 | this.summaryWatermark = watermark;
56 | return this;
57 | }
58 |
59 | type(type: SummaryType): SummaryJob {
60 | this.summaryType = type;
61 | return this;
62 | }
63 |
64 | width(width: number): SummaryJob {
65 | this.summaryWidth = width;
66 | return this;
67 | }
68 |
69 | removeAudio(value: boolean): SummaryJob {
70 | this.summaryRemoveAudio = value;
71 | return this;
72 | }
73 |
74 | async execute() {
75 | let jobType = "gif_summary";
76 | if (this.summaryType === "mp4") {
77 | jobType = "mp4_summary";
78 | }
79 |
80 | if (this.apiKey === null) {
81 | throw new Error("Missing apiKey");
82 | }
83 |
84 | if (this.apiKey.trim() == "") {
85 | throw new Error("Missing apiKey");
86 | }
87 |
88 | const emptyInputUrl = !this.inputUrl || this.inputUrl.trim() === "";
89 |
90 | if (!this.inputBlob && emptyInputUrl) {
91 | throw new Error("Missing inputBlob or inputUrl");
92 | }
93 |
94 | const emptyOutputUrl = !this.outputUrl || this.outputUrl.trim() == "";
95 | if (!this.outputBlob && emptyOutputUrl) {
96 | throw new Error("Missing outputBlob or outputUrl");
97 | }
98 |
99 | if (!this.summaryType) {
100 | throw new Error("Missing summaryType");
101 | }
102 |
103 | const body = {
104 | apiKey: this.apiKey,
105 | successURL: this.successUrl,
106 | failureURL: this.failureUrl,
107 | inputCreds: this.inputBlob?.toApiCredentials(),
108 | outputCreds: this.outputBlob?.toApiCredentials(),
109 | inputURL: this.inputUrl || this.inputBlob.toApiUrl(),
110 | outputURL: this.outputUrl || this.outputBlob.toApiUrl(),
111 | width: `${this.summaryWidth}`,
112 | watermark: this.summaryWatermark?.toJSON(),
113 | removeAudio: this.summaryRemoveAudio,
114 | };
115 |
116 | const resp = await API.createJob(jobType, removeUndefinedFromObj(body));
117 |
118 | const job = new Job(resp.data.id);
119 | return job;
120 | }
121 | }
122 |
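SummaryJob can also be driven directly as a builder, which is essentially what Summarizer.getExecutable assembles in src/index.ts. The key and URLs below are placeholders.

import { SummaryJob } from "./summary";

// Build and submit an mp4 summary job by hand; execute() validates the fields and posts the job.
async function submitSummary(apiKey: string): Promise<string> {
  const job = await new SummaryJob(apiKey)
    .from("https://example.com/input.mp4")
    .to("https://example.com/summary.mp4")
    .webhooks({
      successUrl: "https://example.com/success",
      failureUrl: "https://example.com/failure",
    })
    .type("mp4")
    .width(320)
    .removeAudio(true)
    .execute();
  return job.reqId;
}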
--------------------------------------------------------------------------------
/src/thumbnail.ts:
--------------------------------------------------------------------------------
1 | import { API } from "./api";
2 | import { removeUndefinedFromObj } from "./utils";
3 | import { Watermark } from "./watermark";
4 | import { Blob } from "./blob";
5 | import { Webhooks } from "./webhooks";
6 | import { Job } from "./job";
7 | import { Executable } from "./Executable";
8 |
9 | export class ThumbnailJob implements Executable {
10 | apiKey: string;
11 | successUrl?: string;
12 | failureUrl?: string;
13 | inputUrl?: string;
14 | inputBlob?: Blob;
15 | outputUrl?: string;
16 | outputBlob?: Blob;
17 | thumbWidth?: number;
18 | thumbWatermark?: Watermark;
19 |
20 | constructor(apiKey: string) {
21 | this.thumbWidth = 720;
22 | this.apiKey = apiKey;
23 | }
24 |
25 | webhooks(webhooks: Webhooks): ThumbnailJob {
26 | this.successUrl = webhooks.successUrl;
27 | this.failureUrl = webhooks.failureUrl;
28 | return this;
29 | }
30 |
31 | from(source: string | Blob): ThumbnailJob {
32 | if (typeof source === "string") {
33 | this.inputUrl = source;
34 | } else {
35 | this.inputBlob = source;
36 | }
37 |
38 | return this;
39 | }
40 |
41 | to(destination: string | Blob): ThumbnailJob {
42 | if (typeof destination === "string") {
43 | this.outputUrl = destination;
44 | } else {
45 | this.outputBlob = destination;
46 | }
47 | return this;
48 | }
49 |
50 | watermark(watermark: Watermark): ThumbnailJob {
51 | this.thumbWatermark = watermark;
52 | return this;
53 | }
54 |
55 | width(width: number): ThumbnailJob {
56 | this.thumbWidth = width;
57 | return this;
58 | }
59 |
60 | async execute() {
61 | if (this.apiKey === null) {
62 | throw new Error("Missing apiKey");
63 | }
64 |
65 | if (this.apiKey.trim() == "") {
66 | throw new Error("Missing apiKey");
67 | }
68 |
69 | const emptyInputUrl = !this.inputUrl || this.inputUrl.trim() === "";
70 |
71 | if (!this.inputBlob && emptyInputUrl) {
72 | throw new Error("Missing inputBlob or inputUrl");
73 | }
74 |
75 | const emptyOutputUrl = !this.outputUrl || this.outputUrl.trim() == "";
76 | if (!this.outputBlob && emptyOutputUrl) {
77 | throw new Error("Missing outputBlob or outputUrl");
78 | }
79 |
80 | const body: any = {
81 | apiKey: this.apiKey,
82 | inputCreds: this.inputBlob?.toApiCredentials(),
83 | outputCreds: this.outputBlob?.toApiCredentials(),
84 | successURL: this.successUrl,
85 | failureURL: this.failureUrl,
86 | inputURL: this.inputUrl || this.inputBlob.toApiUrl(),
87 | outputURL: this.outputUrl || this.outputBlob.toApiUrl(),
88 | width: `${this.thumbWidth}`,
89 | watermark: this.thumbWatermark?.toJSON(),
90 | };
91 |
92 | const resp = await API.createJob("thumbnail", removeUndefinedFromObj(body));
93 |
94 | const job = new Job(resp.data.id);
95 | return job;
96 | }
97 | }
98 |
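The same builder pattern applies to ThumbnailJob; a short sketch with placeholder URLs and an image watermark follows.

import { ThumbnailJob } from "./thumbnail";
import { ImageWatermark } from "./watermark";

// Thumbnail a remote video and stamp a logo in the default bottom-right position.
async function submitThumbnail(apiKey: string): Promise<string> {
  const job = await new ThumbnailJob(apiKey)
    .from("https://example.com/input.mp4")
    .to("https://example.com/thumb.jpg")
    .width(150)
    .watermark(
      new ImageWatermark({ url: "https://example.com/logo.png", width: 400, height: 200 })
    )
    .execute();
  return job.reqId;
}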
--------------------------------------------------------------------------------
/src/transcode.ts:
--------------------------------------------------------------------------------
1 | import { API } from "./api";
2 | import { removeUndefinedFromObj } from "./utils";
3 | import { Blob } from "./blob";
4 | import { Watermark } from "./watermark";
5 | import { Job } from "./job";
6 | import { Webhooks } from "./webhooks";
7 | import { Executable } from "./Executable";
8 |
9 | export type Encoder = "h264" | "h265" | "vp8" | "vp9";
10 |
11 | export type Bitrate = "1000" | "2000" | "4000";
12 |
13 | export enum Container {
14 | MP4 = "mp4",
15 | WEBM = "webm",
16 | }
17 |
18 | export class TranscodeJob implements Executable {
19 | apiKey: string;
20 | successUrl?: string;
21 | failureUrl?: string;
22 | inputUrl?: string;
23 | inputBlob?: Blob;
24 | outputUrl?: string;
25 | outputBlob?: Blob;
26 | transcodeWidth?: number;
27 | transcodeHeight?: number;
28 | transcodeWatermark?: Watermark;
29 | transcodeOpts: TranscodeOpts;
30 |
31 | constructor(apiKey: string) {
32 | this.transcodeWidth = 720;
33 | this.apiKey = apiKey;
34 | }
35 |
36 | webhooks(webhooks: Webhooks): TranscodeJob {
37 | this.successUrl = webhooks.successUrl;
38 | this.failureUrl = webhooks.failureUrl;
39 | return this;
40 | }
41 |
42 | from(source: string | Blob): TranscodeJob {
43 | if (typeof source === "string") {
44 | this.inputUrl = source;
45 | } else {
46 | this.inputBlob = source;
47 | }
48 |
49 | return this;
50 | }
51 |
52 | to(destination: string | Blob): TranscodeJob {
53 | if (typeof destination === "string") {
54 | this.outputUrl = destination;
55 | } else {
56 | this.outputBlob = destination;
57 | }
58 | return this;
59 | }
60 |
61 | watermark(watermark: Watermark): TranscodeJob {
62 | this.transcodeWatermark = watermark;
63 | return this;
64 | }
65 |
66 | width(value: number): TranscodeJob {
67 | this.transcodeWidth = value;
68 | return this;
69 | }
70 |
71 | height(value: number): TranscodeJob {
72 | this.transcodeHeight = value;
73 | return this;
74 | }
75 |
76 | opts(value: TranscodeOpts): TranscodeJob {
77 | this.transcodeOpts = value;
78 | return this;
79 | }
80 |
81 | async execute() {
82 | if (this.apiKey === null) {
83 | throw new Error("Missing apiKey");
84 | }
85 |
86 | if (this.apiKey.trim() == "") {
87 | throw new Error("Missing apiKey");
88 | }
89 |
90 | const emptyInputUrl = !this.inputUrl || this.inputUrl.trim() === "";
91 |
92 | if (!this.inputBlob && emptyInputUrl) {
93 | throw new Error("Missing inputBlob or inputUrl");
94 | }
95 |
96 | const emptyOutputUrl = !this.outputUrl || this.outputUrl.trim() == "";
97 | if (!this.outputBlob && emptyOutputUrl) {
98 | throw new Error("Missing outputBlob or outputUrl");
99 | }
100 |
101 | if (!this.transcodeOpts) {
102 | throw new Error("Missing transcodeOpts");
103 | }
104 |
105 | const body = {
106 | apiKey: this.apiKey,
107 | successURL: this.successUrl,
108 | failureURL: this.failureUrl,
109 | inputCreds: this.inputBlob?.toApiCredentials(),
110 | outputCreds: this.outputBlob?.toApiCredentials(),
111 | inputURL: this.inputUrl || this.inputBlob.toApiUrl(),
112 | outputURL: this.outputUrl || this.outputBlob.toApiUrl(),
113 | width: `${this.transcodeWidth}`,
114 | height: !!this.transcodeHeight ? `${this.transcodeHeight}` : undefined,
115 | watermark: this.transcodeWatermark?.toJSON(),
116 | ...this.transcodeOpts?.toJSON(),
117 | };
118 |
119 | const resp = await API.createJob("transcode", removeUndefinedFromObj(body));
120 |
121 | const job = new Job(resp.data.id);
122 | return job;
123 | }
124 | }
125 |
126 | export class TranscodeOpts {
127 | transcoderEncoder: Encoder;
128 | transcoderBitrateKbps: Bitrate;
129 | transcoderContainer: Container;
130 |
131 | constructor() {
132 | this.transcoderEncoder = "h264";
133 | this.transcoderBitrateKbps = "2000";
134 | this.transcoderContainer = Container.MP4;
135 | }
136 |
137 | encoder(value: Encoder): TranscodeOpts {
138 | this.transcoderEncoder = value;
139 | return this;
140 | }
141 |
142 | bitrateKbps(value: Bitrate): TranscodeOpts {
143 | this.transcoderBitrateKbps = value;
144 | return this;
145 | }
146 |
147 | container(value: Container): TranscodeOpts {
148 | this.transcoderContainer = value;
149 | return this;
150 | }
151 |
152 | toJSON() {
153 | return {
154 | encoder: this.transcoderEncoder,
155 | bitrateKBPS: this.transcoderBitrateKbps,
156 | container: this.transcoderContainer,
157 | };
158 | }
159 | }
160 |
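TranscodeOpts carries the encoder/bitrate/container triple that execute() spreads into the request body. A direct builder sketch, with placeholder URLs and an arbitrary choice of settings:

import { Container, TranscodeJob, TranscodeOpts } from "./transcode";

// Transcode to an h265 mp4 at 4000 kbps; width stays at the 720 default because it is not set here.
async function submitTranscode(apiKey: string): Promise<string> {
  const opts = new TranscodeOpts()
    .encoder("h265")
    .bitrateKbps("4000")
    .container(Container.MP4);

  const job = await new TranscodeJob(apiKey)
    .from("https://example.com/input.mov")
    .to("https://example.com/output.mp4")
    .height(480)
    .opts(opts)
    .execute();
  return job.reqId;
}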
--------------------------------------------------------------------------------
/src/utils.ts:
--------------------------------------------------------------------------------
1 | export function removeUndefinedFromObj(obj: Record<string, any>) {
2 | Object.keys(obj).forEach(key => obj[key] === undefined ? delete obj[key] : {});
3 | return obj;
4 | }
5 |
6 | interface ParsedApiKey {
7 | key: string;
8 | name: string;
9 | isTesting: boolean;
10 | }
11 |
12 | export const parseApiKey = (inKey: string): ParsedApiKey => {
13 | // check the LIVE- / test- prefix
14 |   if (!/^(LIVE-|test-)/.test(inKey)) {
15 | throw new Error("bad key format");
16 | }
17 | // chop off the LIVE- / test- prefix
18 | const key = inKey.substring(5);
19 | const matcher = /[a-f0-9]{8}\-[a-f0-9]{4}\-[a-f0-9]{4}\-[a-f0-9]{4}\-[a-f0-9]{12}$/;
20 | const results = key.match(matcher);
21 |
22 | if (!results) {
23 | throw new Error("bad key format");
24 | }
25 | if (!results.index) {
26 | throw new Error("bad key format");
27 | }
28 | const retval = {
29 | key: key.slice(results.index),
30 | name: key.slice(0, results.index - 1),
31 | isTesting: inKey.indexOf("LIVE-") === -1,
32 | };
33 | return retval;
34 | };
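For illustration, a made-up key shows how parseApiKey splits the prefix, the name, and the trailing UUID:

import { parseApiKey } from "./utils";

// The "test-" prefix is dropped, the trailing UUID becomes `key`, and everything before it becomes `name`.
const parsed = parseApiKey("test-demo-01234567-89ab-cdef-0123-456789abcdef");
// parsed.key       -> "01234567-89ab-cdef-0123-456789abcdef"
// parsed.name      -> "demo"
// parsed.isTesting -> true (no "LIVE-" prefix)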
--------------------------------------------------------------------------------
/src/watermark.ts:
--------------------------------------------------------------------------------
1 | import { removeUndefinedFromObj } from "./utils";
2 |
3 | export type Position = "topLeft" | "topRight" | "bottomLeft" | "bottomRight";
4 |
5 | export type Watermark = TextWatermark | ImageWatermark;
6 |
7 | export interface TextWatermarkOptions {
8 | fontSize?: number;
9 | fontColor?: string;
10 | opacity?: number;
11 | position?: Position;
12 | }
13 |
14 | export interface ImageWatermarkOptions {
15 | url?: string;
16 | uploaded_image_name?: string;
17 | width?: number;
18 | height?: number;
19 | opacity?: number;
20 | position?: Position;
21 | }
22 |
23 | export class ImageWatermark implements ImageWatermarkOptions {
24 | path?: string;
25 | image_name?: string;
26 | width?: number;
27 | height?: number;
28 | opacity?: number;
29 | position: Position;
30 |
31 | constructor(opts: ImageWatermarkOptions = {}) {
32 | this.position = opts.position || "bottomRight";
33 | if (opts.height) {
34 | this.height = opts.height;
35 | }
36 | if (opts.width) {
37 | this.width = opts.width;
38 | }
39 | if (opts.url) {
40 | this.path = opts.url;
41 | }
42 | if (opts.uploaded_image_name) {
43 | this.image_name = opts.uploaded_image_name;
44 | }
48 |
49 | if (opts.opacity === 0) {
50 | this.opacity = 0;
51 | } else {
52 | this.opacity = opts.opacity || 0.9;
53 | }
54 | }
55 |
56 | toJSON() {
57 | const ret = {
58 | width: this.width,
59 | height: this.height,
60 | imageName: this.image_name,
61 | imageUrl: this.path,
62 | opacity: `${this.opacity}`,
63 | position: this.position,
64 | };
65 |
66 | return removeUndefinedFromObj(ret);
67 | }
68 | }
69 |
70 | export class TextWatermark implements TextWatermarkOptions {
71 | text: string;
72 | fontSize?: number;
73 | fontColor?: string;
74 | opacity?: number;
75 | position: Position;
76 |
77 | constructor(text: string, opts: TextWatermarkOptions = {}) {
78 | this.text = text;
79 | this.fontSize = opts.fontSize || 12;
80 | this.fontColor = opts.fontColor || "white";
81 | this.position = opts.position || "bottomRight";
82 | if (opts.opacity === 0) {
83 | this.opacity = 0;
84 | } else {
85 | this.opacity = opts.opacity || 0.9;
86 | }
87 | }
88 |
89 | toJSON() {
90 | const ret = {
91 | fontSize: `${this.fontSize}`,
92 | text: this.text,
93 | fontColor: this.fontColor,
94 | opacity: `${this.opacity}`,
95 | position: this.position,
96 | };
97 |
98 | return removeUndefinedFromObj(ret);
99 | }
100 | }
101 |
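The toJSON() payloads are what the job builders send under "watermark"; compare the expectedBody objects in src/index.test.ts. Two quick examples with a placeholder logo URL:

import { ImageWatermark, TextWatermark } from "./watermark";

// Text watermark with the defaults filled in (fontSize 12, white, 0.9 opacity, bottomRight).
const textPayload = new TextWatermark("mediamachine.io").toJSON();
// textPayload -> { fontSize: "12", text: "mediamachine.io", fontColor: "white", opacity: "0.9", position: "bottomRight" }

// Image watermark keeps width/height as numbers and serializes url as imageUrl.
const imagePayload = new ImageWatermark({ url: "https://example.com/logo.png", width: 400, height: 200 }).toJSON();
// imagePayload -> { width: 400, height: 200, imageUrl: "https://example.com/logo.png", opacity: "0.9", position: "bottomRight" }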
--------------------------------------------------------------------------------
/src/webhooks.ts:
--------------------------------------------------------------------------------
1 | export interface Webhooks {
2 | successUrl?: string;
3 | failureUrl?: string;
4 | }
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "outDir": "./lib",
4 | "target": "es5",
5 | "lib": ["dom", "es2015"],
6 | "declaration": true
7 | }
8 | }
9 |
--------------------------------------------------------------------------------