├── .editorconfig ├── .gitignore ├── ExtendScript ├── adjust-target-workarea.jsx ├── event-logger.jsx ├── font-swapping.jsx └── truncate-long-string.jsx ├── LICENSE ├── NodeJS ├── concatenate.js ├── deploy-output │ ├── README.md │ ├── api.js │ ├── app.js │ ├── aws.js │ ├── config.js │ ├── constants.js │ ├── credentials │ │ ├── aws-credentials.json │ │ ├── google-service-account-creds.json │ │ ├── jwplatform-credentials.json │ │ ├── templater-uploader-vimeo.json │ │ └── templater-uploader-youtube.json │ ├── deploy.js │ ├── gsheet.js │ ├── jwplatform.js │ ├── logger.js │ ├── publish.js │ ├── stream.js │ ├── vimeo.js │ └── youtube.js ├── log-footage-download.js ├── log-footage-processing.js ├── package-sequence.js ├── post_job.js ├── transcode-after-job.js └── watch-logs.bat ├── README.md ├── Windows ├── event-logger.bat ├── on-bot-disable-win.php ├── on-bot-disable.bat ├── post-batch.bat └── post-job.bat ├── macOS ├── event-logger.sh ├── on-bot-disable.php ├── on-bot-disable.sh ├── post-batch.sh ├── post-job.sh └── spot-logger.sh ├── package-lock.json └── package.json /.editorconfig: -------------------------------------------------------------------------------- 1 | # This file is for unifying the coding style for different editors and IDEs 2 | # editorconfig.org 3 | 4 | root = true 5 | 6 | [*] 7 | end_of_line = lf 8 | charset = utf-8 9 | trim_trailing_whitespace = true 10 | insert_final_newline = true 11 | indent_style = space 12 | indent_size = 4 13 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | *.monopic 4 | /.vscode/launch.json 5 | /ExtendScript/launch.json 6 | /NodeJS/deploy-output/deploy-err.log 7 | /NodeJS/deploy-output/deploy-out.log 8 | /NodeJS/transcode_err.log 9 | /NodeJS/transcode_out.log 10 | /NodeJS/deploy-output/test-cmd.txt 11 | /NodeJS/deploy-output/registered-events.txt 12 | /NodeJS/deploy-output/dependencies.js 13 | /Tests 14 | -------------------------------------------------------------------------------- /ExtendScript/adjust-target-workarea.jsx: -------------------------------------------------------------------------------- 1 | /* 2 | +--------------------------------------------------------------------+ 3 | | ____ __ __ | 4 | | / __ \____ _/ /_____ ______/ /___ ___ __ | 5 | | / / / / __ `/ __/ __ `/ ___/ / __ `/ / / / | 6 | | / /_/ / /_/ / /_/ /_/ / /__/ / /_/ / /_/ / | 7 | | /_____/\__,_/\__/\__,_/\___/_/\__,_/\__, / | 8 | | Automating Digital Production /____/ | 9 | | | 10 | | | 11 | | We believe that leveraging data in the design process should | 12 | | be a playful and rewarding art. Our products make this | 13 | | possible for digital content creators. | 14 | | | 15 | | |email |web |twitter | 16 | | |support@dataclay.com |dataclay.com |@dataclay | 17 | | | 18 | | This code is provided to you for your personal or commercial | 19 | | use. 
However, you must abide by the terms of the MIT | 20 | | License: https://opensource.org/licenses/MIT | 21 | | | 22 | | | 23 | | Copyright 2013-2018 Dataclay, LLC | 24 | | Licensed under the MIT License | 25 | | | 26 | +--------------------------------------------------------------------+ 27 | 28 | This code makes use of the Templater ExtendScript API which is 29 | documented at the following address: 30 | 31 | http://support.dataclay.com/content/how_to/bot/event_scripts/templater_extendscript_api_reference.htm 32 | 33 | Follow these steps to use this script: 34 | 35 | 1. Add two columns or properties to your data source named 36 | `workarea-start` and `workarea-end`. 37 | 38 | 2. In your data source, for each job's `workarea-start` value, enter 39 | the frame number where you want the work area in the target 40 | composition to begin. Then, for each job's `workarea-end` value, 41 | enter the frame number where you want the work area in the target 42 | composition to end. 43 | 44 | 3. Register this script file with Templater's "After Update" event. 45 | 46 | 4. Run a batch render job with Templater with rows or objects that 47 | have different `workarea-start` and `workarea-end` values. 48 | The output corresponding to each row or object has a different 49 | work area. 50 | 51 | */ 52 | 53 | 54 | var targetComp = $D.target(), 55 | comp_fps = targetComp.frameRate; 56 | f_start = parseInt($D.job.get("workarea-start")); 57 | f_end = parseInt($D.job.get("workarea-end")); 58 | 59 | targetComp.workAreaStart = (f_start / comp_fps); 60 | targetComp.workAreaDuration = (f_end / comp_fps) - targetComp.workAreaStart; -------------------------------------------------------------------------------- /ExtendScript/event-logger.jsx: -------------------------------------------------------------------------------- 1 | /* 2 | +--------------------------------------------------------------------+ 3 | | ____ __ __ | 4 | | / __ \____ _/ /_____ ______/ /___ ___ __ | 5 | | / / / / __ `/ __/ __ `/ ___/ / __ `/ / / / | 6 | | / /_/ / /_/ / /_/ /_/ / /__/ / /_/ / /_/ / | 7 | | /_____/\__,_/\__/\__,_/\___/_/\__,_/\__, / | 8 | | Automating Digital Production /____/ | 9 | | | 10 | | | 11 | | We believe that leveraging data in the design process should | 12 | | be a playful and rewarding art. Our products make this | 13 | | possible for digital content creators. | 14 | | | 15 | | |email |web |twitter | 16 | | |support@dataclay.com |dataclay.com |@dataclay | 17 | | | 18 | | This code is provided to you for your personal or commercial | 19 | | use. However, you must abide by the terms of the MIT | 20 | | License: https://opensource.org/licenses/MIT | 21 | | | 22 | | | 23 | | Copyright 2013-2018 Dataclay, LLC | 24 | | Licensed under the MIT License | 25 | | | 26 | +--------------------------------------------------------------------+ 27 | 28 | This code makes use of the Templater ExtendScript API documented here: 29 | 30 | http://support.dataclay.com/content/how_to/bot/event_scripts/templater_extendscript_api_reference.htm 31 | 32 | Follow these steps to use this script: 33 | 34 | 1. Register this script file to any Templater event, or all events. 35 | 36 | 2. Open an After Effects project that is mapped to a data source 37 | using Templater. 38 | 39 | 3. Open the main Templater panel. Iterate through jobs in the data 40 | source using Templater's "Preview" feature. 41 | 42 | 4. Inspect the `templater.log` file and note all the messages this 43 | code writes to that file. 
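    As an aside (an illustration, not an additional step): every handler
    below writes to that log through the same documented helper, in the
    form

        $D.log.msg('JSX SCRIPT', "A short label for the entry", optional_details);

    where the third argument is optional extra detail to record alongside
    the message.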
44 | 45 | */ 46 | 47 | var message, harvest, dl, dynamic_layer_names, 48 | footage = {}; 49 | job_props = {}; 50 | 51 | //Log what event was just broadcast 52 | $D.log.msg('JSX SCRIPT', "))) EVENT BROADCAST (((", $D.event()); 53 | 54 | if ($D.event() === 'bot_pre_data' ) { //Handling pre-data event 55 | 56 | $D.log.msg('JSX SCRIPT', "Re-versioing After Effects Project File with [ " + $D.task() + " ] task", File(app.project.file).fsName); 57 | 58 | } else if ($D.event() === 'bot_post_data') { //Handling post-data event 59 | 60 | message = "Retreived data from position " + $D.range().start + " to " + $D.range().end; 61 | $D.log.msg('JSX SCRIPT', message, $D.batch.get()); 62 | 63 | $D.log.msg('JSX SCRIPT', "Listing all the `title` values in this batch", $D.batch.get('title')); 64 | 65 | } else if ($D.event() === 'bot_pre_job') { //Handling pre-job event 66 | 67 | message = "Proceeding to re-version AEP for job [ " + $D.job.get('id') + " ]"; 68 | $D.log.msg('JSX SCRIPT', message); 69 | 70 | } else if ($D.event() === 'bot_pre_dl') { //Handling pre-dl event 71 | 72 | dl = $D.download(); 73 | message = "Proceeding to download remote footage"; 74 | $D.log.msg('JSX SCRIPT', message, dl.to_console(true)); 75 | 76 | } else if ($D.event() === 'bot_pre_ftg') { //Handling pre-ftg event 77 | 78 | footage.layer = $D.footage.layer() || null; 79 | footage.source = $D.footage.source() || null; 80 | footage.item = $D.footage.item() || null; 81 | 82 | message = "Proceeding to process footage"; 83 | if (footage.layer) { 84 | $D.log.msg('JSX SCRIPT', "Layer footage mapped to", footage.layer.name); 85 | $D.log.msg('JSX SCRIPT', "Layer source", footage.layer.source.file.fsName); 86 | } 87 | 88 | if (footage.source) { 89 | $D.log.msg('JSX SCRIPT', "Path to footage source", footage.source); 90 | } 91 | 92 | if (footage.item) { 93 | $D.log.msg('JSX SCRIPT', "Footage item ID in AE project", footage.item.id); 94 | } 95 | 96 | 97 | } else if ($D.event() === 'bot_pre_layr') { //Handling pre-update event 98 | 99 | harvest = $D.harvest(); 100 | dynamic_layer_names = []; 101 | 102 | message = "Proceeding to update following layers for job [ " + $D.job.get('id') + " ]"; 103 | 104 | for (var layer_type in harvest) { 105 | 106 | for (var i=0; i < layer_type.length; i++) { 107 | if (harvest[layer_type][i]) dynamic_layer_names.push((harvest[layer_type][i]).name); 108 | } 109 | 110 | } 111 | 112 | $D.log.msg('JSX SCRIPT', message, dynamic_layer_names); 113 | 114 | 115 | } else if ($D.event() === 'bot_post_layr') { //Handling post update event 116 | 117 | $D.log.msg('JSX SCRIPT', "Finished updating layers"); 118 | 119 | } else if ($D.event() === 'bot_pre_rndr') { //Handling pre output events 120 | 121 | $D.log.msg('JSX SCRIPT', "Creating output for job [ " + $D.job.get('id') + " ]"); 122 | 123 | } else if ($D.event() === 'bot_post_rndr') { 124 | 125 | $D.log.msg('JSX SCRIPT', "Finished creating output") 126 | 127 | job_props.aep_loc = Folder(app.project.file.parent).fsName; 128 | job_props.aep = File(app.project.file).fsName; 129 | job_props.log = $D.log.file().fsName; 130 | job_props.data_job = $D.job.file().fsName; 131 | job_props.output_loc = $D.output().loc.fsName; 132 | job_props.output_name = $D.output().name; 133 | job_props.bot_name = $D.bot_id(); 134 | job_props.module = $D.output().module; 135 | job_props.template = $D.output().template; 136 | 137 | $D.log.msg('JSX SCRIPT', "Details for most recently processed job", job_props.to_console(true)); 138 | 139 | } else if ($D.event() === 'bot_post_job') { 140 | 141 | message = 
"Re-versioning job [ " + $D.job.get('id') + " ] is now complete! File containing job information" 142 | $D.log.msg('JSX SCRIPT', message, $D.job.file().fsName); 143 | 144 | } else if ($D.event() === 'bot_post_dl') { 145 | 146 | message = "After Footage Download Event!" 147 | dl = $D.download(); 148 | $D.log.msg('JSX SCRIPT', message, dl.to_console(true)); 149 | 150 | } else if ($D.event() === 'bot_post_ftg') { 151 | 152 | message = "After Footage Processing Event!" 153 | footage.layer = $D.footage.layer() || null; 154 | footage.source = $D.footage.source() || null; 155 | footage.item = $D.footage.item() || null; 156 | 157 | message = "Proceeding to process footage"; 158 | if (footage.layer) { 159 | $D.log.msg('JSX SCRIPT', "Layer footage mapped to", footage.layer.name); 160 | $D.log.msg('JSX SCRIPT', "Layer source", footage.layer.source.file.fsName); 161 | } 162 | 163 | if (footage.source) { 164 | $D.log.msg('JSX SCRIPT', "Path to footage source", footage.source); 165 | } 166 | 167 | if (footage.item) { 168 | $D.log.msg('JSX SCRIPT', "Footage item ID in AE project", footage.item.id); 169 | } 170 | 171 | } else if ($D.event() === 'bot_post_batch') { 172 | 173 | $D.log.msg('JSX SCRIPT', "File containing batch data", $D.batch.file().fsName); 174 | 175 | } else if ($D.event() === 'bot_on_enable') { 176 | 177 | $D.log.msg('JSX SCRIPT', "The bot was just enabled. Waiting to process data."); 178 | 179 | } else if ($D.event() === 'bot_on_shutdown') { 180 | 181 | $D.log.msg('JSX SCRIPT', "The bot was just shut down. No longer waiting for data."); 182 | 183 | } 184 | -------------------------------------------------------------------------------- /ExtendScript/font-swapping.jsx: -------------------------------------------------------------------------------- 1 | /* 2 | +--------------------------------------------------------------------+ 3 | | ____ __ __ | 4 | | / __ \____ _/ /_____ ______/ /___ ___ __ | 5 | | / / / / __ `/ __/ __ `/ ___/ / __ `/ / / / | 6 | | / /_/ / /_/ / /_/ /_/ / /__/ / /_/ / /_/ / | 7 | | /_____/\__,_/\__/\__,_/\___/_/\__,_/\__, / | 8 | | Automating Digital Production /____/ | 9 | | | 10 | | | 11 | | We believe that leveraging data in the design process should | 12 | | be a playful and rewarding art. Our products make this | 13 | | possible for digital content creators. | 14 | | | 15 | | |email |web |twitter | 16 | | |support@dataclay.com |dataclay.com |@dataclay | 17 | | | 18 | | This code is provided to you for your personal or commercial | 19 | | use. However, you must abide by the terms of the MIT | 20 | | License: https://opensource.org/licenses/MIT | 21 | | | 22 | | | 23 | | Copyright 2013-2019 Dataclay, LLC | 24 | | Licensed under the MIT License | 25 | | | 26 | +--------------------------------------------------------------------+ 27 | 28 | Written by Jon Christoffersen / jon@dataclay.com 29 | Based on a script by Shane Murphy @ prender.co 30 | 31 | This code makes use of the Templater ExtendScript API documented here: 32 | 33 | http://support.dataclay.com/content/how_to/bot/event_scripts/templater_extendscript_api_reference.htm 34 | 35 | Follow these steps to use this script: 36 | 37 | 1. Register this script file on "Before Update" Event in Templater Preferences ExtendScript Setup. 
38 | - Font key/column in data source must match the name of a text layer that is tagged with the Templater Settings effect, adding a "--font" tag at the end of the name 39 | -- example a text layer named "text-1" would have a font control column/key in the data source called "text-1--font" 40 | - Font name in data source must be the Postscript Name 41 | 42 | 2. Open an After Effects project that is mapped to a data source using Templater. 43 | 44 | 3. Open the main Templater panel. Iterate through jobs in the data source using Templater's "Preview" feature. 45 | 46 | 4. Inspect the `templater.log` file and note all the messages this code writes to that file. 47 | 48 | */ 49 | 50 | // Logging messages 51 | var textLabel= "FONT CHANGE"; 52 | var scriptErr = "Script error on line "; 53 | var textMsg = "Full list of dynamic text layers"; 54 | var textMsg2 = "Data flagged from source"; 55 | var textMsg3 = "Checking project for layers named..."; 56 | var textMsg4 = "Updating font for selected layers..."; 57 | 58 | // loop over all text layer objects and add names to array 59 | var textLayers = $D.harvest().text; 60 | var i; 61 | var layerNames = []; 62 | 63 | for (i = 0; i < textLayers.length; ++i){ 64 | layerName = textLayers[i].name; 65 | layerNames.push(layerName); 66 | } 67 | 68 | // log text layers to templater.log file 69 | try{ 70 | $D.log.msg(textLabel, textMsg, layerNames); 71 | } catch (err){ 72 | $D.log.msg(textLabel, textMsg, scriptErr + err.line + " : " + err.toString()); 73 | } 74 | 75 | // push data from job.get object that is tagged with "--font" into an array 76 | var taggedData = []; 77 | var dataFromSource = $D.job.get(); 78 | var isLabelFont = "--font"; 79 | 80 | for ( var key in dataFromSource ){ 81 | if (dataFromSource.hasOwnProperty(key)){ 82 | if(key.search(isLabelFont)!=-1){ 83 | taggedData.push(key); 84 | } 85 | } 86 | } 87 | // log keys from data object to an array in templater.log 88 | try{ 89 | $D.log.msg(textLabel, textMsg2, taggedData); 90 | } catch (err){ 91 | $D.log.msg(textLabel, textMsg2, scriptErr + err.line + " : " + err.toString()); 92 | } 93 | 94 | // strip "--font" tag and push into an array 95 | var fontLayers = []; 96 | var flag = /--font/gi; 97 | for (i=0 ; i < taggedData.length; ++i){ 98 | fontLayers.push(taggedData[i].split(flag, 1)); 99 | } 100 | // log stripped tags to templater.log 101 | try{ 102 | $D.log.msg(textLabel, textMsg3, fontLayers); 103 | } catch (err){ 104 | $D.log.msg(textLabel, textMsg3, scriptErr + err.line + " : " + err.toString()); 105 | } 106 | 107 | // swap the fonts if there is a textLayer object that matches the flagged data 108 | for(i=0; i max_characters) ? word.slice(0, max_characters) + '...' 
: word; 59 | 60 | return truncated_word; 61 | 62 | } 63 | 64 | var current_headline = $D.job.get("headline"); 65 | 66 | $D.log.msg('TRUNCATE', "Now truncating string", current_headline); 67 | $D.job.set("headline", truncate(current_headline)); -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 Dataclay 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /NodeJS/concatenate.js: -------------------------------------------------------------------------------- 1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * 2 | 3 | Concatenate Templater batch output with ffmpeg. 4 | Copyright (c) Dataclay LLC 2016 5 | MIT License 6 | 7 | You must enter `npm install` to install all dependency modules used in 8 | this script. All modules are listed in the package.json file in the 9 | root of this repository. 10 | 11 | To use this script. Make sure to have ffmpeg installed and point to 12 | both the ffmpeg and ffprobe binaries in the code below. Then, make 13 | sure that the script's dependencies are installed by entering `npm 14 | install` in the root of your working directory. 15 | 16 | Enter the following command within the "After all jobs" field found 17 | within the Templater Preferences dialog. If using the Templater CLI, 18 | enter the following command in the "post_cmd_batch" property found 19 | within the templater-options.json file. 
20 | 21 | node /path/to/event-scripts/NodeJS/concatenate.js --details $data_batch --outdir $out_dir --outname "finalrender.mov" 22 | 23 | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ 24 | 25 | //Constants 26 | var ffmpeg_win = "C:\\Program Files\\ffmpeg\\bin\\ffmpeg.exe", 27 | ffprobe_win = "C:\\Program Files\\ffmpeg\\bin\\ffprobe.exe", 28 | ffmpeg_osx = "/usr/local/bin/ffmpeg", 29 | ffprobe_osx = "/usr/local/bin/ffprobe"; 30 | 31 | //Required Node Modules 32 | var os = require('os'), 33 | path = require('path'), 34 | fs = require('fs'), 35 | glob = require('glob'), 36 | ffmpeg = require('fluent-ffmpeg'), 37 | argv = require('minimist')(process.argv.slice(2)); 38 | 39 | var batch_details_json = require(path.resolve(argv.details)), 40 | ffmpeg_cmd = ffmpeg(), 41 | concat_output = path.join(argv.outdir, argv.outname); 42 | 43 | if (process.platform == 'win32') { 44 | ffmpeg.setFfmpegPath(ffmpeg_win); 45 | ffmpeg.setFfprobePath(ffprobe_win); 46 | } else { 47 | ffmpeg.setFfmpegPath(ffmpeg_osx); 48 | ffmpeg.setFfprobePath(ffprobe_osx); 49 | } 50 | 51 | console.log("\n\nGathering scenes and sequencing for movie"); 52 | for (var i=0; i < batch_details_json.length; i++) { 53 | 54 | var input_file = glob.sync(batch_details_json[i]["output_asset"] + ".*", { })[0]; 55 | console.log("\n\tscene " + (i + 1)); 56 | console.log("\t" + input_file); 57 | 58 | ffmpeg_cmd.input(path.resolve(input_file)); 59 | 60 | } 61 | 62 | ffmpeg_cmd.on('start', (command) => { 63 | console.log('\n\nStarting concatenation of output:\n\n\t' + command); 64 | }); 65 | 66 | ffmpeg_cmd.on('error', (err, stdout, stderr) => { 67 | console.log("\nError: " + err.message); 68 | console.log(err.stack); 69 | }); 70 | 71 | ffmpeg_cmd.on('end', (stdout, err) => { 72 | console.log("\n\nFinal movie"); 73 | console.log("\n\t" + concat_output); 74 | }); 75 | 76 | ffmpeg_cmd.videoCodec('libx264') 77 | .outputOption('-pix_fmt yuv420p') 78 | .noAudio(); 79 | 80 | ffmpeg_cmd.mergeToFile(concat_output); 81 | -------------------------------------------------------------------------------- /NodeJS/deploy-output/README.md: -------------------------------------------------------------------------------- 1 | # Description 2 | 3 | This application references a Templater Bot data source like a Google Sheet and then deploys Templater output video files. It can send them to a cloud-based storage service like Amazon S3, and also transfer them to a streaming provider. After it does these tasks, this app writes back information from these services like URLs, and embed codes to the Google Sheet. 4 | 5 | # Requirements 6 | 7 | You must have Adobe After Effects and Templater Bot installed to use this application. 8 | 9 | As of version 1.2.0 this application supports the following vendor services. 10 | 11 | |Data Source Types |Cloud Storage | Video Streaming | 12 | |:--------------------|:-----------------|:----------------------| 13 | | Google Sheets | Amazon S3 | JWPlatform or Vimeo | 14 | 15 | Of course, we want to support more vendor services, so we would never turn down a contribution or pull request from any of you. If you are so inclined to help with this, please do not hesitate! 16 | 17 | ## Security & Authorization 18 | As a matter of security, you must obtain the following security credentials from the vendors services above. 19 | 20 | 1. 📀 **Datastore**
21 | At the moment, this application only supports Google Sheets as a data store. It will only read from and write to a single worksheet within a Sheet document. 22 | + **Google Sheets** 23 | + Read more about [Google Service Accounts](https://cloud.google.com/iam/docs/understanding-service-accounts), [Getting Started with Authentication](https://cloud.google.com/docs/authentication/getting-started) and [Setting Up Authentication for Server to Server Production Applications](https://cloud.google.com/docs/authentication/production#auth-cloud-implicit-nodejs) 24 | + Create a new project and a Google Service Account email address in the [Google Cloud Platform API Console](https://cloud.google.com/) 25 | + Download the credential file associated with your Google Service Account 26 | + Share any Google Sheet document with the Google Service Account email address 27 | 28 | 2. 📡 **Video Streaming Platform**
29 | As of version 1.2, only two video streaming platforms are supported: Vimeo and JWPlatform. YouTube will be coming shortly — but feel free to add support and issue a pull request 😄. 30 | + **Vimeo** 31 | + [Register for a user account on Vimeo](https://vimeo.com/). A Vimeo Pro account is recommended. 32 | + Create a Vimeo app in your [Vimeo Developer area](http://developer.vimeo.com) and note its client identifier and client secret 33 | + Optionally, create an embed preset on Vimeo and note its name if you want this application to apply it to your uploaded videos. 34 | 35 | + **JWPlatform** 36 | + [Register for a user account on JWPlatform](https://developer.jwplayer.com/) 37 | + Find your account's API Key and Secret in your Account dashboard, under the "Account" area 38 | + Create a video player on JWPlatform, and note the hash id of the player you created. 39 | 40 | 3. 🗄 **Storage Platform** 41 | + **Amazon Simple Storage Service** 42 | + Create and retrieve your Amazon Web Services IAM user *Access Key ID* and *Secret Access Key* 43 | + [Install and configure the AWS SDK](https://docs.aws.amazon.com/cli/latest/userguide/installing.html) on any machine that runs this application 44 | + Retrieve your AWS [IAM *Access Key ID* and *Secret Access Key*](https://console.aws.amazon.com/iam/) 45 | + [Create an S3 bucket](https://docs.aws.amazon.com/AmazonS3/latest/user-guide/create-bucket.html) and a folder within that bucket, both of which have write permissions for your IAM user account. 46 | 47 | 4. 🎖 **Credential Files**
48 | Fill in or replace the credential files for specific vendor services *if you need them*. You can find placeholder files in this repository. 49 | + `google-service-account-creds.json` which can be downloaded from the Google Cloud Platform Console under your Google Service Accounts. 50 | + `aws-credentials.json` which you can modify with your AWS IAM *Access Key ID* and *Secret Access Key* 51 | + `templater-uploader-vimeo.json` which you can modify with the credentials that you can find in your [Vimeo Developer area](http://developer.vimeo.com) of Vimeo's domain. The values are generated after you create a Vimeo app. 52 | + `jwplatform-credentials.json` which you can modify with your JWPlatform account username, API key, and API secret. 53 | 54 | ## Datastore Setup 55 | 5. **Google Sheet** 56 | + Share the Google Sheet document that Templater will be using as a data source with the Google Service Account email address issued to you when you created the Google Service Account on the Google Cloud Platform API console. The email account might look like the following `my-video-uploader@mycompany.iam.gserviceaccount.com` 57 | + Add the following columns to the Google Sheet worksheet that is mapped to the elements within the After Effects template. 58 | 59 | + `output` — if you don't already have one, create an output column and populate each cell in this column with the name of the file that you want for each specific video. This application uses the names in these cells when locating each rendered file on disk. *Each value in this column must be unique!* 60 | + `s3-link` — the cells under this column store URLs to the assets uploaded to your S3 bucket if you choose to have the assets stored there. S3 is great for archival purposes even if you choose a video streaming platform. 61 | + `broadcast-status` — the cells under this column store the status of the stream on the video streaming platform. The only value that this application currently inserts into these cells is `created`. 62 | + `stream-key` — the cells under this column store a universally unique identification string generated by the streaming platform when a video asset is uploaded to it. 63 | + `broadcast-preview` — the cells under this column will store URLs to a screening page. You may or may not have a screening page set up, so the values in this column may not be accurate.
64 | + `embed-script` — the cells under this column will store HTML `")'; 34 | break; 35 | case enums.video.services.YOUTUBE : formula = null 36 | break; 37 | } 38 | 39 | } else { 40 | 41 | switch (service) { 42 | case enums.video.services.VIMEO : formula = stream.embed_code; 43 | break; 44 | case enums.video.services.JWPLATFORM : formula = ""; 45 | break; 46 | case enums.video.services.YOUTUBE : formula = null 47 | break; 48 | } 49 | 50 | } 51 | 52 | } else { 53 | 54 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) { 55 | 56 | switch (service) { 57 | case enums.video.services.VIMEO : formula = stream.embed_code; 58 | break; 59 | case enums.video.services.JWPLATFORM : formula = '=CONCATENATE("")'; 60 | break; 61 | case enums.video.services.YOUTUBE : formula = null 62 | break; 63 | } 64 | 65 | } else { 66 | 67 | switch (service) { 68 | case enums.video.services.VIMEO : formula = stream.embed_code; 69 | break; 70 | case enums.video.services.JWPLATFORM : formula = ""; 71 | break; 72 | case enums.video.services.YOUTUBE : formula = null 73 | break; 74 | } 75 | 76 | } 77 | 78 | } 79 | 80 | return formula; 81 | 82 | } 83 | 84 | , preview : function(row) { 85 | 86 | var p = config.params, 87 | service = p.video.service, 88 | formula = null; 89 | 90 | if (config.is_batch()) { 91 | 92 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) { 93 | 94 | switch (service) { 95 | case enums.video.services.VIMEO : formula = '=CONCATENATE("https://",' + p.video.preview.domain + ', "/",' + p.fields.stream.letter + row.row_idx + ')'; 96 | break; 97 | case enums.video.services.JWPLATFORM : formula = '=CONCATENATE("http://",' + p.video.preview.domain + ', "#",' + p.video.preview.route + ', "=", ' + p.fields.stream.letter + row.row_idx + ')'; 98 | break; 99 | case enums.video.services.YOUTUBE : formula = null 100 | break; 101 | } 102 | 103 | } else { 104 | 105 | switch (service) { 106 | case enums.video.services.VIMEO : formula = "https://" + enums.vimeo.playback.DOMAIN + "/" + stream.key; 107 | break; 108 | case enums.video.services.JWPLATFORM : formula = "https://" + enums.jw.playback.PREVIEW + stream.key + "-" + p.video.player_key; 109 | break; 110 | case enums.video.services.YOUTUBE : formula = null 111 | break; 112 | } 113 | 114 | } 115 | 116 | } else { 117 | 118 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) { 119 | 120 | switch (service) { 121 | case enums.video.services.VIMEO : formula = '=CONCATENATE("https://",' + p.video.preview.domain + ', "/", "' + stream.key + '")'; 122 | break; 123 | case enums.video.services.JWPLATFORM : formula = '=CONCATENATE("http://",' + p.video.preview.domain + ', "#",' + p.video.preview.route + ', "=", "' + stream.key + '")'; 124 | break; 125 | case enums.video.services.YOUTUBE : formula = null; 126 | break; 127 | } 128 | 129 | } else { 130 | 131 | switch (service) { 132 | case enums.video.services.VIMEO : formula = "https://" + enums.vimeo.playback.DOMAIN + "/" + stream.key; 133 | break; 134 | case enums.video.services.JWPLATFORM : formula = "https://" + enums.jw.playback.PREVIEW + stream.key + "-" + p.video.player_key; 135 | break; 136 | case enums.video.services.YOUTUBE : formula = null; 137 | break; 138 | 139 | } 140 | 141 | } 142 | 143 | } 144 | 145 | return formula; 146 | 147 | } 148 | 149 | , url : function(row) { 150 | 151 | var p = config.params; 152 | 153 | return sprintf(p.video.stream_url, stream.key); 154 | 155 | } 156 | } 157 | 158 | module.exports = stream; 
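Note on the code above: when the data source is a Google Sheet, `preview()` returns an `=CONCATENATE(...)` spreadsheet formula; otherwise it returns a literal URL string. A small self-contained sketch of the literal JWPlatform branch follows. It is not part of stream.js, and the host and key values are invented placeholders; in the app they come from `enums.jw.playback.PREVIEW` in constants.js and from `config.params`.

// Sketch only: the non-Google-Sheets JWPlatform branch of preview() above.
// All concrete values here are made-up examples.
var PREVIEW_HOST = "content.jwplatform.com/previews/"; // stands in for enums.jw.playback.PREVIEW (assumed value)
var stream_key   = "AbC123Xy";                         // id the platform assigns after upload (made up)
var player_key   = "p1AyEr42";                         // hash id of the JW player you created (made up)

var preview_url  = "https://" + PREVIEW_HOST + stream_key + "-" + player_key;
console.log(preview_url); // -> https://content.jwplatform.com/previews/AbC123Xy-p1AyEr42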
-------------------------------------------------------------------------------- /NodeJS/deploy-output/vimeo.js: -------------------------------------------------------------------------------- 1 | var log = require('./logger'), 2 | enums = require('./constants'), 3 | config = require('./config'), 4 | stream = require('./stream'), 5 | aws = require('./aws'), 6 | async = require('async'), 7 | moment = require('moment'), 8 | ytLogger = require('bug-killer'), 9 | fs = require('fs'), 10 | readJson = require('r-json'), 11 | rline = require('readline-sync'), 12 | uuid = require('uuid'), 13 | url = require('url'), 14 | logln = require('single-line-log').stdout, 15 | emoji = require('node-emoji'), 16 | path = require('path'), 17 | https = require('https'), 18 | user = null, 19 | api = null; 20 | 21 | const Vimeo = require("vimeo").Vimeo, 22 | opn = require("opn"), 23 | prettyBytes = require("pretty-bytes"), 24 | vmoScopes = ['public','private','create','edit','delete','interact','upload','video_files']; 25 | 26 | var vmo = { 27 | 28 | get : function(step, auth_only) { 29 | 30 | let vmoState = uuid.v4(); 31 | 32 | api = new Vimeo(config.params.video.key, config.params.video.secret); 33 | 34 | if (!config.params.prefs.oauth.vimeo || !config.params.prefs.oauth.vimeo.access_token) { 35 | 36 | opn(api.buildAuthorizationEndpoint(config.params.video.redirect, vmoScopes, vmoState).toString()); 37 | 38 | var auth_url = url.parse(rline.question("\n\nFirst authorize this application to access your Vimeo account.\n\nAfter authorizing this application, copy the URL from the browser's window and press the key\n"), true); 39 | 40 | if (auth_url.query.state !== vmoState) { 41 | 42 | log.error(enums.errors.incorrect_vmo_state); 43 | throw new Error(enums.errors.incorrect_vmo_state); 44 | 45 | } else { 46 | 47 | api.accessToken(auth_url.query.code, config.params.video.redirect, (err, response) => { 48 | 49 | if (err) { 50 | log.error(err.message); 51 | throw err; 52 | } 53 | 54 | config.params.prefs.oauth.vimeo = response; 55 | config.write_prefs(config.params.prefs); 56 | 57 | api.setAccessToken(response.access_token); 58 | user = response.user; 59 | 60 | if (config.params.video.authorize) { 61 | 62 | log.info("\n\t\t%s\tYou're in! This application can now access your Vimeo account." 63 | , emoji.get('tada')); 64 | 65 | log.info("\n\t\t%s\tAuthorization info saved to:\n\t\t\t%s" 66 | , emoji.get('clipboard') 67 | , config.where_prefs()); 68 | 69 | step(true); 70 | } else { 71 | step(); 72 | } 73 | 74 | }); 75 | 76 | } 77 | 78 | } else { 79 | 80 | api.setAccessToken(config.params.prefs.oauth.vimeo.access_token); 81 | log.info("\n\t\tVimeo access token already retrieved!"); 82 | step(); 83 | 84 | } 85 | 86 | }, 87 | 88 | video : { 89 | 90 | key : null, 91 | 92 | asset : null, 93 | 94 | group : { id: null, name: null, uri: null }, 95 | 96 | get_details : function(step) { 97 | 98 | var req_options = { 99 | method: "GET" 100 | , path: ("/videos/" + vmo.video.key) 101 | } 102 | 103 | api.request(req_options, function(error, body, status_code, headers) { 104 | 105 | if (error) { 106 | log.error(error) 107 | step(); 108 | } 109 | 110 | if (body) { 111 | 112 | stream.key = ((config.params.video.privacy === "unlisted") ? 
body.link.split('https://vimeo.com/')[1] : vmo.video.key); 113 | 114 | log.info("\n\t\t%s\tPrivacy for [ %s ] is set to [ %s ]" 115 | , emoji.get('see_no_evil') 116 | , path.parse(stream.upload).base 117 | , config.params.video.privacy); 118 | 119 | stream.embed_code = body.embed.html; 120 | 121 | log.info("\n\t\t%s\t Vimeo embed code:\n\t\t\t%s" 122 | , emoji.get('man-woman-girl-boy') 123 | , stream.embed_code) 124 | 125 | step(); 126 | } 127 | 128 | }) 129 | 130 | }, 131 | 132 | create_group : function(step) { 133 | 134 | //todo add video to an album grouping 135 | var req_all_albums = { 136 | method : "GET" 137 | , path : "/me/albums" 138 | , query : { per_page: 100 } 139 | } 140 | 141 | if (config.params.video.group) { 142 | 143 | vmo.video.group.name = config.params.video.group; 144 | 145 | //get all the user's albums 146 | api.request(req_all_albums, function(error, body, status_code, headers) { 147 | 148 | if (error) { 149 | log.error(error.message) 150 | throw error; 151 | } 152 | 153 | for (var i=0; i < body.data.length; i++) { 154 | 155 | if (body.data[i]['name'] === config.params.video.group) { 156 | vmo.video.group.uri = body.data[i]['uri']; 157 | 158 | log.info("\n\t\t%s\tGroup [ %s ] already created." 159 | , emoji.get('books') 160 | , vmo.video.group.name); 161 | 162 | break; 163 | } 164 | 165 | } 166 | 167 | if (!vmo.video.group.uri) { 168 | 169 | var req_make_album = { 170 | method : "POST" 171 | , path : "/me/albums" 172 | , query : { name : config.params.video.group 173 | , description : config.params.video.group 174 | } 175 | }; 176 | 177 | api.request(req_make_album, function(error, body, status_code, headers) { 178 | 179 | if (error) { 180 | log.error(error.message) 181 | throw error; 182 | } 183 | 184 | vmo.video.group.uri = body.uri; 185 | step(); 186 | 187 | }) 188 | 189 | } else { 190 | 191 | step(); 192 | } 193 | 194 | }); 195 | 196 | } else { 197 | step(); 198 | } 199 | 200 | }, 201 | 202 | add_to_group : function(step) { 203 | 204 | if (config.params.video.group) { 205 | 206 | var group_id = vmo.video.group.uri.substring(vmo.video.group.uri.lastIndexOf('/') + 1, vmo.video.group.uri.length); 207 | 208 | var req_vid_to_group = { 209 | method : "PUT" 210 | , path : ("/me/albums/" + group_id + "/videos/" + vmo.video.key) 211 | } 212 | 213 | api.request(req_vid_to_group, function(error, body, status_code, headers) { 214 | 215 | if (error) { 216 | log.error(error); 217 | throw error; 218 | } else { 219 | log.info("\n\t\t%s\tMoved [ %s ] to group [ %s ]" 220 | , emoji.get('books') 221 | , path.parse(stream.upload).base 222 | , vmo.video.group.name); 223 | } 224 | step(); 225 | 226 | }); 227 | 228 | } else { 229 | 230 | step(); 231 | 232 | } 233 | 234 | }, 235 | 236 | set_poster : function(step) { 237 | 238 | //1. 
Get the picture URI for the video 239 | var p = config.params 240 | , upload_uri = null 241 | , upload_link = null 242 | , poster_stat_uri = null; 243 | 244 | async.series([ (next) => { 245 | 246 | var req_picture_uri = { 247 | method: "GET" 248 | , path: "/videos/" + vmo.video.key 249 | } 250 | 251 | api.request(req_picture_uri, (err, body, status_code, headers) => { 252 | if (err) { 253 | log.error(err); 254 | } else { 255 | 256 | upload_uri = body.metadata.connections.pictures.uri; 257 | 258 | log.info("\n\t\t%s\tPoster setup endpoint => %s" 259 | , emoji.get('frame_with_picture') 260 | , upload_uri); 261 | 262 | } 263 | 264 | next(); 265 | 266 | }) 267 | 268 | }, 269 | 270 | (next) => { 271 | 272 | var req_upload_link = { 273 | method : "POST" 274 | , path : upload_uri 275 | } 276 | 277 | api.request(req_upload_link, (err, body, status_code, headers) => { 278 | 279 | if (err) { 280 | log.error(err) 281 | } else { 282 | upload_link = body.link; 283 | //console.log(body); 284 | poster_stat_uri = body.uri; 285 | log.info("\n\t\t%s\tPoster upload link => %s" 286 | , emoji.get('frame_with_picture') 287 | , upload_link); 288 | } 289 | 290 | next(); 291 | 292 | }) 293 | 294 | }, 295 | 296 | (next) => { 297 | 298 | var vmoClient = new Vimeo(p.video.key, p.video.secret, p.prefs.oauth.vimeo.access_token); 299 | 300 | var req_upload_poster = vmoClient._buildRequestOptions({ 301 | method : 'PUT', 302 | port : 443, 303 | hostname : 'i.cloud.vimeo.com', 304 | path : upload_link.replace('https://i.cloud.vimeo.com', ''), 305 | query : stream.thumb, 306 | headers : { 307 | 'Content-Type' : 'image/png', 308 | 'Accept' : 'application/vnd.vimeo.*+json;version=3.4' 309 | } 310 | }); 311 | 312 | let output = ''; 313 | 314 | const req = https.request(req_upload_poster, function(res) { 315 | 316 | // log.info('STATUS: ' + res.statusCode); 317 | // log.info('HEADERS: ' + JSON.stringify(res.headers)); 318 | 319 | res.setEncoding('utf8'); 320 | 321 | res.on('end', () => { 322 | //log.info(output); 323 | log.info("\n\t\t%s\tFinished uploading poster => %s" 324 | , emoji.get('frame_with_picture') 325 | , stream.thumb ); 326 | 327 | next(); 328 | }); 329 | 330 | res.on('data', function (chunk) { 331 | output += chunk; 332 | }); 333 | 334 | }); 335 | 336 | req.on('error', function(e) { 337 | log.error('Error: ' + e.message); 338 | }); 339 | 340 | req.on('start', (e) => { 341 | log.info("Started request!"); 342 | }); 343 | 344 | fs.createReadStream(stream.thumb).pipe(req); 345 | 346 | }, 347 | 348 | (next) => { 349 | 350 | var req_picture_uri = { 351 | method: "PATCH" 352 | , path: poster_stat_uri 353 | , query: { 354 | "active" : true 355 | } 356 | } 357 | 358 | log.info("\n\t\t%s\tPoster activation endpoint => %s" 359 | , emoji.get('frame_with_picture') 360 | , poster_stat_uri); 361 | 362 | api.request(req_picture_uri, (err, body, status_code, headers) => { 363 | if (err) { 364 | log.error("Error:\n\n%o", err); 365 | } else { 366 | log.info("\n\t\t%s\tPoster activated? 
=> %s" 367 | , emoji.get('frame_with_picture') 368 | , body.active); 369 | } 370 | 371 | next(); 372 | 373 | }) 374 | 375 | } 376 | 377 | ], step); 378 | 379 | //make a call to set the poster frame by time - does not currently work in auto-upload context 380 | /* 381 | var p = config.params, 382 | req_set_poster = { 383 | method : "POST" 384 | , path : "/videos/" + vmo.video.key + "/pictures" 385 | , query : { active: true, time: parseFloat(p.video.thumb) } 386 | } 387 | 388 | api.request(req_set_poster, (err, body, status_code, headers) => { 389 | 390 | if (err) { 391 | log.error(err); 392 | } else { 393 | log.info("\n\t\t%s\tSet the poster frame for [ %s ] to [ %s ] seconds" 394 | , emoji.get('frame_with_picture') 395 | , path.parse(stream.upload).base 396 | , parseFloat(p.video.thumb)) 397 | } 398 | 399 | step(); 400 | 401 | }) 402 | */ 403 | 404 | }, 405 | 406 | set_embed_preset : function(step) { 407 | 408 | var p = config.params 409 | , preset = null 410 | , preset_uri = null 411 | , preset_id = null; 412 | 413 | //console.log("Player key is [ " + p.video.player_key + " ] and is of type [ " + typeof p.video.player_key + " ] "); 414 | 415 | if (p.video.player_key) { 416 | 417 | async.series([ 418 | 419 | (next) => { 420 | 421 | var req_embeds = { 422 | method : "GET" 423 | , path : "/me/presets" 424 | } 425 | 426 | api.request(req_embeds, (err, body, status_code, headers) => { 427 | 428 | if (err) { 429 | 430 | log.error(err); 431 | 432 | } else { 433 | 434 | preset = body.data.find(x => x.name === p.video.player_key); 435 | preset_uri = preset.uri.split('/'); 436 | preset_id = preset_uri[preset_uri.length-1]; 437 | 438 | // log.info("\n\t\t%s\tFound Vimeo Player Preset [ %s ] with id [ %s ]" 439 | // , emoji.get('film_frames') 440 | // , preset.name 441 | // , preset_id); 442 | 443 | } 444 | 445 | next(); 446 | 447 | }) 448 | 449 | }, 450 | 451 | (next) => { 452 | 453 | var req_embed_set = { 454 | method : "PUT" 455 | , path : "/videos/" + vmo.video.key + "/presets/" + preset_id 456 | } 457 | 458 | console.log(req_embed_set); 459 | 460 | api.request(req_embed_set, (err, body, status_code, headers) => { 461 | 462 | console.log(status_code) 463 | 464 | if (err) { 465 | 466 | log.error(err); 467 | 468 | } else { 469 | 470 | log.info("\n\t\t%s\tSet Vimeo video with id [ %s ] to embed preset [ %s ]" 471 | , emoji.get('film_frames') 472 | , vmo.video.key 473 | , preset.name); 474 | 475 | } 476 | 477 | next(); 478 | 479 | }) 480 | 481 | } ], (err) => { 482 | 483 | log.info("\n\t\t%s\tDone setting Vimeo Embed Preset!" 484 | , emoji.get('film_frames')); 485 | 486 | step(); 487 | 488 | }); 489 | 490 | } else { 491 | 492 | //no player exists so we move on 493 | step(); 494 | 495 | } 496 | 497 | }, 498 | 499 | create : function(row, step) { 500 | 501 | var p = config.params, 502 | vid = { 503 | name : (config.is_batch() ? row[p.video.title] : p.video.title) 504 | , description : (config.is_batch() ? 
row[p.video.desc] : p.video.desc) 505 | , privacy : { 506 | view : p.video.privacy 507 | , download : p.video.downloadable 508 | , comments : p.video.comments 509 | } 510 | }; 511 | 512 | //If the AWS is set as storage and a link exists, then use Vimeo's pull method 513 | if (p.storage.type === enums.storage.types.S3 && aws.S3_URL.video) 514 | { 515 | 516 | log.info("\n\t\t%s\tVimeo pulling video from storage [ %s ] from link\n\t\t\t\t[ %s ]" 517 | , emoji.get('telephone_receiver') 518 | , p.storage.type 519 | , aws.S3_URL.video); 520 | 521 | vid.upload = { approach: "pull", link: aws.S3_URL.video } 522 | 523 | var pull_req = { 524 | method : "POST" 525 | , path : "/me/videos/" 526 | , query : vid 527 | } 528 | 529 | api.request(pull_req, (error, body, status_code, headers) => { 530 | 531 | if (error) { 532 | log.error("Error with request to pull video from S3 => \n\t%o\n\n\t%s" 533 | , pull_req 534 | , error); 535 | } 536 | 537 | vmo.video.key = body.uri.substring(body.uri.lastIndexOf('/') + 1, body.uri.length); 538 | 539 | log.info("\n\t\t%s\tFinished uploading [ %s ] to Vimeo!" 540 | , emoji.get('clapper') 541 | , path.parse(stream.upload).base); 542 | 543 | async.series([ 544 | vmo.video.create_group, 545 | vmo.video.add_to_group, 546 | vmo.video.set_poster, 547 | vmo.video.set_embed_preset, 548 | vmo.video.get_details 549 | ], step); 550 | 551 | }); 552 | 553 | 554 | } else { 555 | 556 | log.info("\n\t\t%s\tInitiating upload of [ %s ] to Vimeo ... %s" 557 | , emoji.get('boom') 558 | , path.parse(stream.upload).base 559 | , emoji.get('rocket')) 560 | 561 | api.upload( 562 | 563 | stream.upload, //path to asset on the filesystem 564 | vid, //options to upload 565 | 566 | function (uri) { 567 | 568 | vmo.video.key = uri.substring(uri.lastIndexOf('/') + 1, uri.length); 569 | 570 | log.info("\n\t\t%s\tFinished uploading [ %s ] to Vimeo!" 571 | , emoji.get('clapper') 572 | , path.parse(stream.upload).base); 573 | 574 | async.series([ 575 | vmo.video.create_group, 576 | vmo.video.add_to_group, 577 | vmo.video.set_poster, 578 | vmo.video.set_embed_preset, 579 | vmo.video.get_details 580 | ], step); 581 | 582 | }, 583 | 584 | function (bytesUploaded, bytesTotal) { 585 | var percentage = (bytesUploaded / bytesTotal * 100).toFixed(2) 586 | logln("\n\t\t" + emoji.get('clapper') + "\tSending [ " + path.parse(stream.upload).base + " ] to Vimeo... 
" + percentage + "% " + emoji.get('rocket') + "\n"); 587 | }, 588 | 589 | function (error) { 590 | log.error('\n\nUpload of video asset failed because: %s' 591 | , error) 592 | step(); 593 | 594 | } 595 | ) 596 | 597 | } 598 | 599 | } 600 | 601 | } 602 | 603 | } 604 | 605 | module.exports = vmo; -------------------------------------------------------------------------------- /NodeJS/deploy-output/youtube.js: -------------------------------------------------------------------------------- 1 | var enums = require('./constants'), 2 | config = require('./config'), 3 | moment = require('moment'), 4 | ytLogger = require("bug-killer"), 5 | fs = require("fs"), 6 | readJson = require("r-json"), 7 | rline = require('readline-sync'), 8 | //rl = rline.createInterface(process.stdin, process.stdout), 9 | api = null; 10 | 11 | const Youtube = require("youtube-api"), 12 | opn = require("opn"), 13 | prettyBytes = require("pretty-bytes"); 14 | 15 | var yt = { 16 | 17 | get : function(step) { 18 | 19 | let oauth = Youtube.authenticate({ 20 | type : "oauth" 21 | , client_id : config.params.video.key 22 | , client_secret : config.params.video.secret 23 | , redirect_url : config.params.auth.yt.creds.installed.redirect_uris[0] 24 | }) 25 | 26 | //TODO: Check if there is an existing access token saved to file. If not then open browser and prompt for access code. 27 | if (!config.params.prefs.oauth.youtube || !config.params.prefs.oauth.youtube.refresh_token) { 28 | 29 | opn(oauth.generateAuthUrl({ 30 | access_type: "offline" 31 | , scope: ["https://www.googleapis.com/auth/youtube.upload"] 32 | })); 33 | 34 | var auth_code = rline.question("\n\nFirst grant this application authorization to access your YouTube account.\n\nPlease enter in the authorization code from your browser and press the key\n"); 35 | 36 | oauth.getToken(auth_code, (err, tokens) => { 37 | 38 | if (err) { 39 | throw err; 40 | } 41 | 42 | config.params.prefs.oauth.youtube = tokens; 43 | config.write_prefs(config.params.prefs); 44 | step(); 45 | 46 | }); 47 | 48 | } else { 49 | 50 | console.log("\nAttempting to retreive access token using refresh token. Please wait...."); 51 | step(); 52 | 53 | } 54 | 55 | }, 56 | 57 | video : { 58 | 59 | key : null, 60 | 61 | create : function(row, step) { 62 | 63 | var vid = { 64 | download_url : row.s3_url 65 | , title : config.params.video.title 66 | , description : config.params.video.desc 67 | , author : config.params.user.name 68 | , expires_date : moment().add(1, 'years').unix() 69 | } 70 | 71 | api.post('/v1/videos/create', vid, null, function(err, results){ 72 | 73 | if (err) { 74 | console.log(err) 75 | } else { 76 | jw.video.key = results.video.key; 77 | } 78 | 79 | step(); 80 | 81 | }); 82 | 83 | } 84 | 85 | } 86 | 87 | } 88 | 89 | module.exports = yt; -------------------------------------------------------------------------------- /NodeJS/log-footage-download.js: -------------------------------------------------------------------------------- 1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * 2 | 3 | Logs pre and post footage download in Templater for After Effects 4 | Copyright (c) Dataclay LLC 2024 5 | MIT License 6 | 7 | You must enter `npm install` to install all dependency modules used in 8 | this script. All modules are listed in the package.json file in the 9 | root of this repository. 
10 | 11 | To log information about footage assets Templater retrieves, enter the 12 | following command within the "Before footage download" and 13 | "After footage download" fields found within the Templater Preferences 14 | dialog. If using the Templater CLI, enter it into the "pre_cmd_dl" 15 | and "post_cmd_dl" properties found within the templater-options.json file. 16 | 17 | 18 | /path/to/node /path/to/log-footage-download.js --event $event --aefile $aep --data $data_job --aedir $aep_dir --uri $dl_file_uri --name $dl_file_name --dir $dl_file_dir --mime $dl_file_mime --ext $dl_file_ext --file $dl_file 19 | 20 | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ 21 | 22 | //Required NodeJS Modules 23 | var os = require('os'), 24 | fs = require('fs'), 25 | fse = require('fs-extra'), 26 | util = require('util'), 27 | path = require('path'), 28 | moment = require('moment'), 29 | argv = require('minimist')(process.argv.slice(2)); 30 | 31 | var logfile = "templater-ftg-download.log", 32 | job_data = require(argv.data); 33 | proj = path.resolve(argv.aedir), 34 | log = path.join(os.tmpdir(), logfile), 35 | log_dest = path.join(proj, "templater-post-job.log"), 36 | msg = msg = "\r\n------------ [TEMPLATER DOWNLOAD] -----------\r\n"; 37 | 38 | 39 | //Design the output for the post job log 40 | msg += "\r\nTemplater Event [ " + argv.event + " ] on [ " + moment().format('MMMM Do YYYY, h:mm:ss a') + "]"; 41 | msg += "\r\n\r\nData set\r\n " + JSON.stringify(job_data, null, 4).replace(/(?:\r\n|\r|\n)/g, "\r\n "); 42 | msg += "\r\n\r\nProject directory\r\n > " + proj; 43 | msg += "\r\n\r\nProject file \r\n > " + path.basename(argv.aefile); 44 | msg += "\r\n\r\nDownload file URI\r\n > " + argv.uri; //$dl_file_uri 45 | msg += "\r\n\r\nDownload file name\r\n > " + argv.name; //$dl_file_name 46 | msg += "\r\n\r\nDownload file directory\r\n > " + argv.dir; //$dl_file_dir 47 | msg += "\r\n\r\nDownload mime type\r\n > " + argv.mime; //$dl_file_mime 48 | msg += "\r\n\r\nDownload file extension\r\n > " + argv.ext; //$dl_file_ext 49 | msg += "\r\n\r\nDownload file asset\r\n > " + argv.file; //$dl_file 50 | msg += "\r\n\r\n"; 51 | 52 | 53 | //Append to log and copy log to project directory. 54 | //NOTE: On Windows, NodeJS cannot append to files that 55 | // exist on a mapped network drive. First we 56 | // append a local file in the temp directory, then 57 | // copy it to the project directory 58 | 59 | try { 60 | fs.appendFileSync(log, msg, 'utf8'); 61 | fse.copySync(log, log_dest); 62 | } catch (err) { 63 | console.error(err.message); 64 | fs.appendFileSync("\r\nError : " + err.message); 65 | } 66 | -------------------------------------------------------------------------------- /NodeJS/log-footage-processing.js: -------------------------------------------------------------------------------- 1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * 2 | 3 | Logs pre and post footage processing in Templater for After Effects 4 | Copyright (c) Dataclay LLC 2024 5 | MIT License 6 | 7 | You must enter `npm install` to install all dependency modules used in 8 | this script. All modules are listed in the package.json file in the 9 | root of this repository. 10 | 11 | To log information about footage assets Templater retrieves, enter the 12 | following command within the "Before footage download" and 13 | "After footage download" fields found within the Templater Preferences 14 | dialog. 
If using the Templater CLI, enter it into the "pre_cmd_ftg" 15 | and "post_cmd_ftg" properties found within the templater-options.json file. 16 | 17 | 18 | /path/to/node /path/to/log-footage-processing.js --event $event --aefile $aep --data $data_job --aedir $aep_dir --layer $ftg_layer --name $ftg_name --file $ftg_file --dir $ftg_dir --ext $ftg_ext 19 | 20 | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ 21 | 22 | //Required NodeJS Modules 23 | var os = require('os'), 24 | fs = require('fs'), 25 | fse = require('fs-extra'), 26 | util = require('util'), 27 | path = require('path'), 28 | moment = require('moment'), 29 | argv = require('minimist')(process.argv.slice(2)); 30 | 31 | var logfile = "templater-ftg-process.log", 32 | job_data = require(argv.data); 33 | proj = path.resolve(argv.aedir), 34 | log = path.join(os.tmpdir(), logfile), 35 | log_dest = path.join(proj, "templater-ftg-process.log"), 36 | msg = msg = "\r\n------------ [TEMPLATER DOWNLOAD] -----------\r\n"; 37 | 38 | 39 | //Design the output for the post job log 40 | msg += "\r\nTemplater Event [ " + argv.event + " ] on [ " + moment().format('MMMM Do YYYY, h:mm:ss a') + "]"; 41 | msg += "\r\n\r\nData set\r\n " + JSON.stringify(job_data, null, 4).replace(/(?:\r\n|\r|\n)/g, "\r\n "); 42 | msg += "\r\n\r\nProject directory\r\n > " + proj; 43 | msg += "\r\n\r\nProject file \r\n > " + path.basename(argv.aefile); 44 | msg += "\r\n\r\nNew footage layer\r\n > " + argv.layer; //$ftg_layer 45 | msg += "\r\n\r\nNew footage file\r\n > " + argv.file; //$ftg_file 46 | msg += "\r\n\r\nNew footage name\r\n > " + argv.name; //$ftg_name 47 | msg += "\r\n\r\nNew footage dir\r\n > " + argv.dir; //$ftg_dir 48 | msg += "\r\n\r\nNew footage extension\r\n > " + argv.ext; //$ftg_ext 49 | msg += "\r\n\r\n"; 50 | 51 | 52 | //Append to log and copy log to project directory. 53 | //NOTE: On Windows, NodeJS cannot append to files that 54 | // exist on a mapped network drive. First we 55 | // append a local file in the temp directory, then 56 | // copy it to the project directory 57 | 58 | try { 59 | fs.appendFileSync(log, msg, 'utf8'); 60 | fse.copySync(log, log_dest); 61 | } catch (err) { 62 | console.error(err.message); 63 | fs.appendFileSync("\r\nError : " + err.message); 64 | } 65 | -------------------------------------------------------------------------------- /NodeJS/package-sequence.js: -------------------------------------------------------------------------------- 1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * 2 | 3 | Archive an image sequence generated by Adobe After Effects via Templater 4 | Copyright (c) Dataclay LLC 2016 5 | MIT License 6 | 7 | You must enter `npm install` to install all dependency modules used in 8 | this script. All modules are listed in the package.json file in the 9 | root of this repository. 10 | 11 | Enter the following command within the "After all jobs" field found 12 | within the Templater Preferences dialog. If using the Templater CLI, 13 | enter the following command in the "post_cmd_job" property found 14 | within the templater-options.json file. 
15 | 16 | node /path/to/event-scripts/NodeJS/package-sequence.js --outdir $out_dir --outname $id --aefile $aep --aedir $aep_dir --repo "/path/to/archive/repository" --extension "zip" 17 | 18 | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ 19 | 20 | //Required NodeJS Modules 21 | var os = require('os'), 22 | fs = require('fs'), 23 | fse = require('fs-extra'), 24 | util = require('util'), 25 | path = require('path'), 26 | moment = require('moment'), 27 | archiver = require('archiver'); 28 | argv = require('minimist')(process.argv.slice(2)); 29 | 30 | //Optional for testing: 31 | //Change these paths and strings if you want to test this script on the command line outside of AE 32 | var test_dest = "/path/to/output/folder", 33 | test_proj = "/path/to/project/folder", 34 | test_outname = "sequence_id"; //Use a name / id of a given job. 35 | test_repo = "/path/to/archive/repository"; 36 | test_ext = "zip"; //Use any extension you would like. 37 | 38 | var msg = ''; 39 | msg += "\r\n\r\n"; 40 | msg += "\r\n\r\n+-------------------------------------------------------------------+\r\n"; 41 | msg += "| Packaging Image Sequence |\\\r\n", 42 | msg += "+-------------------------------------------------------------------+\\\r\n", 43 | msg += "\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\r\n\r\n" 44 | 45 | var logfile = "archival.log", 46 | proj = path.resolve(argv.aedir || test_proj), 47 | log = path.resolve(os.tmpdir(), logfile), 48 | log_dest, out_root, out_dir; 49 | 50 | //Location to write the archived sequence 51 | if (argv.outdir && argv.outname) { 52 | log_dest = path.resolve(argv.repo, logfile); 53 | out_root = path.resolve(argv.outdir); 54 | out_dir = path.resolve(argv.outdir, argv.outname); 55 | out_name = argv.outname; 56 | repo_dir = path.resolve(argv.repo); 57 | arch_ext = argv.extension; 58 | } else { 59 | log_dest = path.resolve(test_repo, logfile); 60 | out_root = path.resolve(test_dest); 61 | out_dir = path.resolve(test_dest, test_outname); 62 | out_name = test_outname; 63 | repo_dir = path.resolve(test_repo); 64 | arch_ext = test_ext; 65 | } 66 | 67 | //Design the output for the post job log 68 | msg += "\r\nArchived on => " + moment().format('MMMM Do YYYY, h:mm:ss a'); 69 | msg += "\r\n\r\nAE Project File => " + argv.aefile; 70 | msg += "\r\n\r\nAE Project Directory => " + proj; 71 | msg += "\r\n\r\nTemplater Output Root => " + out_root; 72 | msg += "\r\n\r\nAE Output Name => " + out_name; 73 | msg += "\r\n\r\nSequence Directory => " + out_dir; 74 | msg += "\r\n\r\nArchive Extension => " + arch_ext; 75 | 76 | console.log(msg); 77 | 78 | //Use archiver to zip up the entire out_dir 79 | var archive_filename = out_name + "." + arch_ext, 80 | archive_file = path.resolve(out_root, archive_filename), 81 | archive_output = fs.createWriteStream(archive_file), 82 | archive = archiver("zip"); 83 | 84 | archive_output.on('close', function() { 85 | 86 | msg += "\r\n\r\nArchive Size => " + archive.pointer(); 87 | msg += "\r\n\r\nArchive Location => " + path.resolve(repo_dir, archive_filename); 88 | msg += "\r\n\r\nArchiver finalized. The output file descriptor has closed." 89 | 90 | //Append to log and copy log to project directory. 91 | //NOTE: On Windows, NodeJS cannot append files that 92 | // exist on a mapped network drive. 
93 |     //      append a local file in the temp directory, then
94 |     //      copy it to the project directory.
95 |     try {
96 |         fs.appendFileSync(log, msg, 'utf8');
97 |         fse.copySync(log, log_dest);
98 |     } catch (err) {
99 |         console.error(err.message);
100 |         fs.appendFileSync(log, "\r\nError : " + err.message, 'utf8');
101 |     }
102 | 
103 |     //Copy the archive to its repository
104 |     fse.copySync(archive_file, path.resolve(repo_dir, archive_filename));
105 | 
106 | });
107 | 
108 | archive_output.on('error', function(err) {
109 |     throw err;
110 | });
111 | 
112 | archive.pipe(archive_output);
113 | 
114 | archive.directory(out_dir, "/").finalize();
115 | 
--------------------------------------------------------------------------------
/NodeJS/post_job.js:
--------------------------------------------------------------------------------
1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 | 
3 | Log each Templater versioning job to a file
4 | Copyright (c) Dataclay LLC 2016
5 | MIT License
6 | 
7 | You must enter `npm install` to install all dependency modules used in
8 | this script. All modules are listed in the package.json file in the
9 | root of this repository.
10 | 
11 | To log information about the most recently completed job, enter the
12 | following command within the "After each job" field found within the
13 | Templater Preferences dialog. If using the Templater CLI, enter it
14 | into the "post_cmd_job" property found within the
15 | templater-options.json file.
16 | 
17 | node /path/to/event-scripts/NodeJS/post_job.js --outdir $out_dir --aefile $aep --data $data_job --aedir $aep_dir --outfile $out_file -- $title
18 | 
19 | NOTE: The "-- $title" part of the command assumes that your data
20 | source has a column header, or property key, named "title"
21 | 
22 | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
23 | 
24 | //Required NodeJS Modules
25 | var os = require('os'),
26 |     fs = require('fs'),
27 |     fse = require('fs-extra'),
28 |     util = require('util'),
29 |     path = require('path'),
30 |     moment = require('moment'),
31 |     argv = require('minimist')(process.argv.slice(2));
32 | 
33 | var logfile = "templater-post-job.log",
34 |     job_data = require(argv.data),
35 |     proj = path.resolve(argv.aedir),
36 |     log = path.join(os.tmpdir(), logfile),
37 |     log_dest = path.join(proj, "templater-post-job.log"),
38 |     msg = "\r\n------------ [TEMPLATER JOB] -----------\r\n";
39 | 
40 | 
41 | //Design the output for the post job log
42 | msg += "\r\nJob completed processing on\r\n > " + moment().format('MMMM Do YYYY, h:mm:ss a');
43 | msg += "\r\n\r\nFinished processing project\r\n > " + argv.aefile;
44 | msg += "\r\n\r\nProject directory\r\n > " + proj;
45 | msg += "\r\n\r\nOutput directory\r\n > " + argv.outdir;
46 | msg += "\r\n\r\nOutput asset\r\n > " + argv.outfile;
47 | msg += "\r\n\r\nTitle\r\n > " + argv._[0];
48 | msg += "\r\n\r\nData set\r\n > " + JSON.stringify(job_data, null, 4).replace(/(?:\r\n|\r|\n)/g, "\r\n ");
49 | msg += "\r\n\r\n";
50 | 
51 | 
52 | //Append to log and copy log to project directory.
53 | //NOTE: On Windows, NodeJS cannot append to files that
54 | //      exist on a mapped network drive. First we
55 | //      append a local file in the temp directory, then
56 | //      copy it to the project directory.
57 | 
58 | try {
59 |     fs.appendFileSync(log, msg, 'utf8');
60 |     fse.copySync(log, log_dest);
61 | } catch (err) {
62 |     console.error(err.message);
63 |     fs.appendFileSync(log, "\r\nError : " + err.message, 'utf8');
64 | }
65 | 
--------------------------------------------------------------------------------
/NodeJS/watch-logs.bat:
--------------------------------------------------------------------------------
1 | start cmd /k "bash -c "tail -f /z/Dev/event-scripts/NodeJS/transcode_out.log" "
2 | start cmd /k "bash -c "tail -f /z/Dev/event-scripts/NodeJS/deploy-output/deploy-out.log" "
--------------------------------------------------------------------------------
/Windows/event-logger.bat:
--------------------------------------------------------------------------------
1 | @echo off
2 | :: +--------------------------------------------------------------------+
3 | :: | ____ __ __ |
4 | :: | / __ \____ _/ /_____ ______/ /___ ___ __ |
5 | :: | / / / / __ `/ __/ __ `/ ___/ / __ `/ / / / |
6 | :: | / /_/ / /_/ / /_/ /_/ / /__/ / /_/ / /_/ / |
7 | :: | /_____/\__,_/\__/\__,_/\___/_/\__,_/\__, / |
8 | :: | Automating Digital Production /____/ |
9 | :: | |
10 | :: | |
11 | :: | We believe that leveraging data in the design process should |
12 | :: | be a playful and rewarding art. Our products make this |
13 | :: | possible for digital content creators. |
14 | :: | |
15 | :: | |email |web |twitter |
16 | :: | |support@dataclay.com |dataclay.com |@dataclay |
17 | :: | |
18 | :: | This code is provided to you for your personal or commercial |
19 | :: | use. However, you must abide by the terms of the MIT |
20 | :: | License: https://opensource.org/licenses/MIT |
21 | :: | |
22 | :: | |
23 | :: | Copyright 2013-2020 Dataclay, LLC |
24 | :: | Licensed under the MIT License |
25 | :: | |
26 | :: +--------------------------------------------------------------------+
27 | 
28 | :: This is a sample batch script for learning how batch scripts can work
29 | :: with Templater Bot on Windows.
30 | ::
31 | :: Follow these steps to use this script:
32 | ::
33 | :: 1. Open an After Effects project that is mapped to a data source
34 | ::    using Templater.
35 | ::
36 | :: 2. Open the main Templater panel. Open the `Templater Preferences`
37 | ::    dialog. Click the `Setup Shell Scripts` button.
38 | ::
39 | :: 3. Register the following to any or all of the events
40 | ::
41 | ::    Z:\path\to\event-logger.bat $event $id $aep_dir $log $data_job $aep $out_dir $out_name $data_batch $bot_name $machine_name $user_name $sources $data_start $data_end $out_file $now
42 | ::
43 | :: 4. Iterate through "Previews" of jobs as stored in Templater's
44 | ::    connected data source.
45 | ::
46 | :: 5. Inspect the `events.log` file that will be created in the same
47 | ::    directory as the After Effects project file.
48 | 
49 | set EVENT=%~1
50 | set JOB=%~2
51 | set AEP_LOC=%~3
52 | set TEMPLATER_LOG=%~4
53 | set DATA_JOB=%~5
54 | set AEP=%~6
55 | set OUT_DIR=%~7
56 | set OUT_NAME=%~8
57 | set DATA_BATCH=%~9
58 | SHIFT
59 | SHIFT
60 | SHIFT
61 | SHIFT
62 | SHIFT
63 | SHIFT
64 | SHIFT
65 | SHIFT
66 | SHIFT
67 | set BOT_NAME=%~1
68 | set MACHINE=%~2
69 | set USER=%~3
70 | set SOURCES=%~4
71 | set DATA_START=%~5
72 | set DATA_END=%~6
73 | set OUT_FILE=%~7
74 | set NOW=%~8
75 | set n=^&echo.
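:: NOTE: cmd.exe can only reference parameters %1 through %9 directly, so the
::       nine SHIFT commands above move the tenth and later arguments
::       ($bot_name onward) down into that addressable range before they are read.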
76 | 
77 | ::The `events.log` file will be in the same directory as the AEP file
78 | set log=%AEP_LOC%\events.log
79 | 
80 | For /f "tokens=2-4 delims=/ " %%a in ('date /t') do (set mydate=%%c-%%a-%%b)
81 | For /f "tokens=1-2 delims=/:" %%a in ('time /t') do (set mytime=%%a%%b)
82 | 
83 | echo --- %mydate% @ %mytime% [ TEMPLATER EVENT : %EVENT% ] --- >> "%log%"
84 | echo. >> "%log%"
85 | echo Templater Log File : %TEMPLATER_LOG% >> "%log%"
86 | echo Machine Name : %MACHINE% >> "%log%"
87 | echo User Name : %USER% >> "%log%"
88 | echo Timestamp : %NOW% >> "%log%"
89 | echo Source Location : %SOURCES% >> "%log%"
90 | echo Data Start : %DATA_START% >> "%log%"
91 | echo Data End : %DATA_END% >> "%log%"
92 | echo Bot Name : %BOT_NAME% >> "%log%"
93 | echo AEP File : %AEP% >> "%log%"
94 | echo AEP Location : %AEP_LOC% >> "%log%"
95 | echo. >> "%log%"
96 | 
97 | if "%EVENT%" == "bot_pre_job" (
98 | 
99 |     echo Specific to [ %EVENT% ] event: >> "%log%"
100 |     echo Job Data File : %DATA_JOB% >> "%log%"
101 |     echo Output Asset Name : %OUT_NAME% >> "%log%"
102 |     echo Output File Path : %OUT_FILE% >> "%log%"
103 |     echo Output Directory : %OUT_DIR% >> "%log%"
104 |     echo. >> "%log%"
105 | 
106 | )
107 | 
108 | if "%EVENT%" == "bot_post_job" (
109 | 
110 |     echo Specific to [ %EVENT% ] event: >> "%log%"
111 |     echo Job Data File : %DATA_JOB% >> "%log%"
112 |     echo Output Asset Name : %OUT_NAME% >> "%log%"
113 |     echo Output File Path : %OUT_FILE% >> "%log%"
114 |     echo Output Directory : %OUT_DIR% >> "%log%"
115 |     echo. >> "%log%"
116 | 
117 | )
118 | 
119 | if "%EVENT%" == "bot_pre_layr" (
120 | 
121 |     echo Specific to [ %EVENT% ] event: >> "%log%"
122 |     echo NONE >> "%log%"
123 |     echo. >> "%log%"
124 | 
125 | )
126 | 
127 | if "%EVENT%" == "bot_post_layr" (
128 | 
129 |     echo Specific to [ %EVENT% ] event: >> "%log%"
130 |     echo NONE >> "%log%"
131 |     echo. >> "%log%"
132 | 
133 | )
134 | 
135 | if "%EVENT%" == "bot_pre_rndr" (
136 | 
137 |     echo Specific to [ %EVENT% ] event: >> "%log%"
138 |     echo NONE >> "%log%"
139 |     echo. >> "%log%"
140 | 
141 | )
142 | 
143 | if "%EVENT%" == "bot_post_rndr" (
144 | 
145 |     echo Specific to [ %EVENT% ] event: >> "%log%"
146 |     echo NONE >> "%log%"
147 |     echo. >> "%log%"
148 | 
149 | )
150 | 
151 | if "%EVENT%" == "bot_post_batch" (
152 | 
153 |     echo Specific to [ %EVENT% ] event: >> "%log%"
154 |     echo Batch Data File : "%DATA_BATCH%" >> "%log%"
155 |     echo. >> "%log%"
156 | 
157 | )
--------------------------------------------------------------------------------
/Windows/on-bot-disable-win.php:
--------------------------------------------------------------------------------
1 | <?php
12 |  * $argv[1] => The given name of The Bot as found in Templater's Preferences dialog
13 |  * $argv[2] => Absolute path to the AE project file being processed at the time of disable
14 |  * $argv[3] => Absolute path to the folder containing the AE project file being processed
15 |  *
16 |  * Provided for your personal or commercial use by Dataclay, LLC.
17 |  *
18 |  * NOTE: THIS IS ONLY SAMPLE CODE. IT WILL LIKELY NOT WORK IN YOUR ENVIRONMENT.
19 |  *       THE SCRIPT IS INCLUDING OTHER SCRIPTS NOT PRESENT IN THIS CODE.
20 |  */
21 | 
22 | include_once('smtp.conf.php');
23 | require_once 'vendor/swiftmailer/swiftmailer/lib/swift_required.php';
24 | 
25 | $senderName = "Dataclay Information";
26 | $senderEmail = 'info@dataclay.io';
27 | $recipientEmail = 'support@dataclay.io';
28 | $recipientName = 'Dataclay Support';
29 | $subject = "The Bot for Templater is now disabled";
30 | 
31 | $datetime = date('Y-m-d H:i:s');
32 | 
33 | $senderMsg = "The Bot for Templater was disabled at " . $datetime . "\n\n[Bot Name]\n" . $argv[1] . "\n\n[AE Project File]\n" . $argv[2] . "\n\n[AE Project Folder]\n" . $argv[3] . "\n\nThis could have happened manually or because of an error.\n\nRegards,\nDataclay Support";
34 | 
35 | $transport = Swift_SmtpTransport::newInstance($dclay_smtp_address, $dclay_smtp_port, 'ssl')
36 |     ->setUsername($userEmail)
37 |     ->setPassword($dclay_smtp_password);
38 | 
39 | $mailer = Swift_Mailer::newInstance($transport);
40 | 
41 | $message = Swift_Message::newInstance()
42 |     ->setSubject($subject)
43 |     ->setFrom(array($senderEmail => $senderName))
44 |     ->setTo(array($recipientEmail => $recipientName))
45 |     ->setBody($senderMsg);
46 | 
47 | $result = $mailer->send($message);
48 | 
49 | ?>
50 | 
--------------------------------------------------------------------------------
/Windows/on-bot-disable.bat:
--------------------------------------------------------------------------------
1 | :: Sample batch script for Bot-Shutdown Event
2 | :: If you enable 'For all commands, use job details as arguments'
3 | :: some details about the just-finished batch will be appended to the
4 | :: command as arguments. On Windows, there was no easy way to call
5 | :: the php script directly, so we call php via a batch file. Furthermore,
6 | :: we had to provide an absolute path to the php executable even though
7 | :: php.exe is in the environment path.
8 | ::
9 | :: The order of the arguments is as follows:
10 | ::
11 | :: %1 => The given name of The Bot as found in Templater's Preferences dialog
12 | :: %2 => Absolute path to the AE project file being processed at the time of disable
13 | :: %3 => Absolute path to the folder containing the AE project file being processed
14 | ::
15 | :: Provided for your personal or commercial use by Dataclay, LLC
16 | 
17 | @ECHO OFF
18 | 
19 | SET log=%3\templater-bot.log
20 | For /f "tokens=2-4 delims=/ " %%a in ('date /t') do (set mydate=%%c-%%a-%%b)
21 | For /f "tokens=1-2 delims=/:" %%a in ('time /t') do (set mytime=%%a%%b)
22 | 
23 | echo -------- [TEMPLATER BOT] -------- >> %log%
24 | echo The bot went down on %mydate% @ %mytime% >> %log%
25 | echo Sending email notice >> %log%
26 | "C:\Program Files (x86)\PHP\php.exe" "L:\event-scripts\Windows\on-bot-disable-win.php" %1 %2 %3
27 | echo Done sending email notice >> %log%
28 | 
29 | 
30 | 
--------------------------------------------------------------------------------
/Windows/post-batch.bat:
--------------------------------------------------------------------------------
1 | :: Sample batch script for Post-Batch Bot Event
2 | :: If you enable 'For all commands, use job details as arguments'
3 | :: some details about the just-finished batch will be appended to the
4 | :: command as arguments.
5 | ::
6 | :: Argument order is as follows for render operations after a batch completes
7 | :: %1 => Absolute path to the JSON file containing jobs in the most recently completed batch.
8 | :: %2 => Absolute path to the processed AE project file.
9 | :: %3 => Absolute path to the folder containing the processed AE project file.
10 | :: %4 => Absolute path to the root of the specified output location 11 | :: 12 | :: Provided for your personal or commercial use by Dataclay, LLC 13 | 14 | @ECHO ON 15 | 16 | SET log=%3\post-batch.log 17 | For /f "tokens=2-4 delims=/ " %%a in ('date /t') do (set mydate=%%c-%%a-%%b) 18 | For /f "tokens=1-2 delims=/:" %%a in ('time /t') do (set mytime=%%a%%b) 19 | 20 | echo -------- [TEMPLATER BATCH] -------- >> %log% 21 | echo Batch completed on %mydate% @ %mytime% >> %log% 22 | echo Batch details as JSON are found in file %1 >> %log% 23 | echo Output files in batch operation exist in %4 >> %log% 24 | -------------------------------------------------------------------------------- /Windows/post-job.bat: -------------------------------------------------------------------------------- 1 | :: Sample batch script for Post-Job Bot Event 2 | :: If you enable 'For all commands, use job details as arguments' 3 | :: some details about the just-finished job will be appended to the 4 | :: command as arguments. 5 | :: 6 | :: Argument order is as follows for render operations after each job completes 7 | :: %1 => The row index in the spreadsheet. This is always `null` when Bot is enabled. 8 | :: %2 => The value of the job's ID column if it has one, `null` if no ID value. 9 | :: %3 => The value of the job's devised output name. 10 | :: %4 => Absolute path to the final rendered file if it was rendered. 11 | :: %5 => Absolute path to the folder containing the rendered file. 12 | :: %6 => Absolute path to the processed AE project file. 13 | :: %7 => Absolute path to the folder containing the processed AE project file. 14 | :: %8 => Absolute path to a .json file containing all job's column values 15 | :: 16 | :: Provided for your personal or commercial use by Dataclay, LLC 17 | 18 | @ECHO OFF 19 | 20 | SET log=%7\post-job.log 21 | For /f "tokens=2-4 delims=/ " %%a in ('date /t') do (set mydate=%%c-%%a-%%b) 22 | For /f "tokens=1-2 delims=/:" %%a in ('time /t') do (set mytime=%%a%%b) 23 | 24 | echo -------- [TEMPLATER JOB] -------- >> %log% 25 | echo Job completed on %mydate% @ %mytime% >> %log% 26 | echo Rendered job with ID %2 to %4 >> %log% 27 | echo Job details as JSON are found in file %8 >> %log% 28 | echo( >> %log% 29 | -------------------------------------------------------------------------------- /macOS/event-logger.sh: -------------------------------------------------------------------------------- 1 | # +--------------------------------------------------------------------+ 2 | # | ____ __ __ | 3 | # | / __ \____ _/ /_____ ______/ /___ ___ __ | 4 | # | / / / / __ `/ __/ __ `/ ___/ / __ `/ / / / | 5 | # | / /_/ / /_/ / /_/ /_/ / /__/ / /_/ / /_/ / | 6 | # | /_____/\__,_/\__/\__,_/\___/_/\__,_/\__, / | 7 | # | Automating Digital Production /____/ | 8 | # | | 9 | # | | 10 | # | We believe that leveraging data in the design process should | 11 | # | be a playful and rewarding art. Our products make this | 12 | # | possible for digital content creators. | 13 | # | | 14 | # | |email |web |twitter | 15 | # | |support@dataclay.com |dataclay.com |@dataclay | 16 | # | | 17 | # | This code is provided to you for your personal or commercial | 18 | # | use. 
However, you must abide by the terms of the MIT | 19 | # | License: https://opensource.org/licenses/MIT | 20 | # | | 21 | # | | 22 | # | Copyright 2013-2018 Dataclay, LLC | 23 | # | Licensed under the MIT License | 24 | # | | 25 | # +--------------------------------------------------------------------+ 26 | 27 | # This is a sample bash script for learning how shell scripts can work 28 | # with Templater Bot on macOS. 29 | # 30 | # Follow these steps to use this script: 31 | # 32 | # 1. Open an After Effects project that is mapped to a data source 33 | # using Templater. 34 | # 35 | # 2. Open the main Templater panel. Open the `Templater Preferences` 36 | # dialog. Click the `Setup Shell Scripts` button. 37 | # 38 | # 3. Register the following to any orall of the events 39 | # 40 | # /path/to/event-logger.sh $event $id $aep_dir $log $data_job $aep $out_dir $out_name $data_batch $bot_name $machine_name $user_name $sources $data_start $data_end $out_file $now 41 | # 42 | # 4. Iterate through "Previews" of jobs as stored in Templater's 43 | # connected data source. 44 | # 45 | # 5. Inspect the `events.log` file that will be created in the same 46 | # directory as the After Effects project file. 47 | 48 | 49 | event="$1" 50 | job_id="$2" 51 | aep_loc="$3" 52 | templater_log="$4" 53 | data_job="$5" 54 | aep="$6" 55 | out_dir="$7" 56 | out_name="$8" 57 | data_batch="$9" 58 | bot_name="${10}" 59 | machine="${11}" 60 | user="${12}" 61 | sources="${13}" 62 | data_start="${14}" 63 | data_end="${15}" 64 | out_file="${16}" 65 | now="${17}" 66 | 67 | #The `events.log` file will be in the same directory as the AEP file 68 | log="$aep_loc/events.log" 69 | 70 | if [ $event = "bot_pre_data" ] 71 | then 72 | printf " - $(date) [ TEMPLATER EVENT : $event ] - \n\n" >> "$log" 73 | 74 | printf "\tTemplater Log File => $templater_log\n" >> "$log" 75 | printf "\tMachine Name => $machine\n" >> "$log" 76 | printf "\tUser Name => $user\n" >> "$log" 77 | printf "\tTimestamp => $now\n" >> "$log" 78 | printf "\tSource Location => $sources\n" >> "$log" 79 | printf "\tData Start => $data_start\n" >> "$log" 80 | printf "\tData End => $data_end\n" >> "$log" 81 | printf "\tBot Name => $bot_name\n" >> "$log" 82 | printf "\tAEP File => $aep\n" >> "$log" 83 | printf "\tAEP Location => $aep_loc\n" >> "$log" 84 | printf "\tOutput Location => $out_dir\n" >> "$log" 85 | 86 | 87 | else 88 | 89 | if [ $event = "bot_pre_job" ] || [ $event = "bot_post_job" ] 90 | then 91 | 92 | if [ $event = "bot_pre_job" ] 93 | then 94 | printf "\n" >> "$log" 95 | fi 96 | 97 | printf "\t- $(date) [ TEMPLATER EVENT : $event : JOB ID - $job_id ] - \n\n" >> "$log" 98 | 99 | if [ $event = "bot_post_job" ] 100 | then 101 | printf "" >> "$log" 102 | printf "\tJob Data File => $data_job\n" >> "$log" 103 | printf "\tTemplater Output Name => $out_name\n" >> "$log" 104 | printf "\tTemplater Output File => $out_file\n" >> "$log" 105 | fi 106 | 107 | else 108 | 109 | if [ $event = "bot_pre_layr" ] || [ $event = "bot_post_layr" ] || [ $event = "bot_pre_rndr" ] || [ $event = "bot_post_rndr" ] 110 | then 111 | printf "\t\t- $(date) [ TEMPLATER EVENT : $event ] - \n" >> "$log" 112 | else 113 | printf " - $(date) [ TEMPLATER EVENT : $event ] - \n" >> "$log" 114 | fi 115 | 116 | fi 117 | 118 | fi 119 | 120 | printf "\n" >> "$log" 121 | 122 | if [ $event = "bot_post_batch" ] 123 | then 124 | printf "\tBatch Data File => $data_batch\n" >> "$log" 125 | printf "\n# # #\n\n" >> "$log" 126 | fi 127 | 
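# For a quick manual test outside of Templater, the script could be invoked
# with placeholder values in the same argument order shown in the header above.
# These values are illustrative only, and any argument a given event does not
# use may simply be left empty:
#
#   ./event-logger.sh bot_post_job job-42 "/path/to/aep/folder" "/path/to/templater.log" \
#       "/path/to/job-42.json" "/path/to/project.aep" "/path/to/out" "job-42-output"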
--------------------------------------------------------------------------------
/macOS/on-bot-disable.php:
--------------------------------------------------------------------------------
1 | #!/usr/local/opt/php55/bin/php
2 | <?php
14 |  * $argv[1] => The given name of The Bot as found in Templater's Preferences dialog
15 |  * $argv[2] => Absolute path to the AE project file being processed at the time of disable
16 |  * $argv[3] => Absolute path to the folder containing the AE project file being processed
17 |  *
18 |  * Provided to you for your personal or commercial use by Dataclay, LLC.
19 |  *
20 |  * NOTE: THIS IS ONLY SAMPLE CODE. IT WILL LIKELY NOT WORK IN YOUR ENVIRONMENT.
21 |  *       THIS SCRIPT IS INCLUDING OTHER SCRIPTS NOT PRESENT IN THIS CODE.
22 |  */
23 | 
24 | include_once('smtp.conf.php');
25 | require_once 'vendor/swiftmailer/swiftmailer/lib/swift_required.php';
26 | 
27 | $senderName = "Dataclay Information";
28 | $senderEmail = 'info@dataclay.io';
29 | $recipientEmail = 'support@dataclay.io';
30 | $recipientName = 'Dataclay Support';
31 | $subject = "The Bot for Templater is now disabled";
32 | 
33 | $datetime = date('Y-m-d H:i:s');
34 | 
35 | $senderMsg = "The Bot for Templater was disabled at " . $datetime . "\n\n[Bot Name]\n" . $argv[1] . "\n\n[AE Project File]\n" . $argv[2] . "\n\n[AE Project Folder]\n" . $argv[3] . "\n\nThis could have happened manually or because of an error.\n\nRegards,\nDataclay Support";
36 | 
37 | $transport = Swift_SmtpTransport::newInstance($dclay_smtp_address, $dclay_smtp_port, 'ssl')
38 |     ->setUsername($userEmail)
39 |     ->setPassword($dclay_smtp_password);
40 | 
41 | $mailer = Swift_Mailer::newInstance($transport);
42 | 
43 | $message = Swift_Message::newInstance()
44 |     ->setSubject($subject)
45 |     ->setFrom(array($senderEmail => $senderName))
46 |     ->setTo(array($recipientEmail => $recipientName))
47 |     ->setBody($senderMsg);
48 | 
49 | $result = $mailer->send($message);
50 | 
51 | ?>
52 | 
--------------------------------------------------------------------------------
/macOS/on-bot-disable.sh:
--------------------------------------------------------------------------------
1 | # Sample bash script for Bot-Shutdown Event
2 | # If you enable 'For all commands, use job details as arguments'
3 | # some details about the just-finished batch will be appended to the
4 | # command as arguments. This macOS version calls the php interpreter
5 | # directly; note that it uses absolute paths to both the php executable
6 | # and the PHP script below, so adjust those paths to match your own
7 | # environment before registering this script.
8 | #
9 | # The order of the arguments is as follows:
10 | #
11 | # $1 => The given name of The Bot as found in Templater's Preferences dialog
12 | # $2 => Absolute path to the AE project file being processed at the time of disable
13 | # $3 => Absolute path to the folder containing the AE project file being processed
14 | #
15 | # Provided for your personal or commercial use by Dataclay, LLC
16 | 
17 | log="$3/templater-bot.log"
18 | echo "-------- [TEMPLATER BOT] --------" >> "$log"
19 | echo "" >> "$log"
20 | echo " The bot went down at $(date)" >> "$log"
21 | echo " Sending email notice" >> "$log"
22 | /usr/local/opt/php55/bin/php -f "/Users/arie/Dev/Templater/Scripts/on-bot-disable.php" -- "$1" "$2" "$3"
23 | echo " Done sending email notice" >> "$log"
24 | echo "" >> "$log"
25 | 
26 | 
--------------------------------------------------------------------------------
/macOS/post-batch.sh:
--------------------------------------------------------------------------------
1 | #
2 | # Sample bash script for Post-Batch Bot Event
3 | # If you enable 'For all commands, use job details as arguments'
4 | # some details about the just-finished batch will be appended to the
5 | # command as arguments.
6 | #
7 | # Argument order is as follows for render operations after a batch completes
8 | # $1 => Absolute path to the JSON file containing jobs in the most recently completed batch.
9 | # $2 => Absolute path to the processed AE project file.
10 | # $3 => Absolute path to the folder containing the processed AE project file.
11 | # $4 => Absolute path to the root of the specified output location.
12 | #
13 | # Provided for your personal or commercial use by Dataclay, LLC.
14 | 
15 | log="$3/post-batch.log"
16 | echo "-------- [TEMPLATER BATCH] --------" >> "$log"
17 | echo "" >> "$log"
18 | echo " Batch completed on $(date)" >> "$log"
19 | echo "" >> "$log"
20 | echo " Batch details as JSON are found at $1" >> "$log"
21 | echo "" >> "$log"
22 | echo " Output files in batch operation exist in $4" >> "$log"
23 | echo "" >> "$log"
24 | 
--------------------------------------------------------------------------------
/macOS/post-job.sh:
--------------------------------------------------------------------------------
1 | #
2 | # Sample bash script for Post-Job Bot Event
3 | # If you enable 'For all commands, use job details as arguments'
4 | # some details about the just-finished job will be appended to the
5 | # command as arguments.
6 | #
7 | # Argument order is as follows for render operations after each job completes
8 | # $1 => The row index in the spreadsheet. This is always `null` when Bot is enabled.
9 | # $2 => The value of the job's ID column if it has one, `null` if no ID value.
10 | # $3 => The value of the job's devised output name.
11 | # $4 => Absolute path to the final rendered file if it was rendered.
12 | # $5 => Absolute path to the folder containing the rendered file.
13 | # $6 => Absolute path to the processed AE project file.
14 | # $7 => Absolute path to the folder containing the processed AE project file.
15 | # $8 => Absolute path to a .json file containing all of the job's column values
16 | #
17 | # Provided for your personal or commercial use by Dataclay, LLC.
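#
# As an illustration only (Templater itself appends these arguments for you
# when the option above is enabled), the script could be exercised by hand
# with placeholder values such as:
#
#   ./post-job.sh null job-42 job-42-output "/path/to/out/job-42.mov" \
#       "/path/to/out" "/path/to/project.aep" "/path/to/project" "/path/to/job-42.json"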
18 | 
19 | log="$7/post-job.log"
20 | echo "-------- [TEMPLATER JOB] --------" >> "$log"
21 | echo "" >> "$log"
22 | echo " Job completed on $(date)" >> "$log"
23 | echo "" >> "$log"
24 | echo " Rendered job with ID \`$2\` to $4" >> "$log"
25 | echo "" >> "$log"
26 | echo " Job details as JSON are found in file $8" >> "$log"
27 | echo "" >> "$log"
28 | 
--------------------------------------------------------------------------------
/macOS/spot-logger.sh:
--------------------------------------------------------------------------------
1 | spot_log="$1/spots.log"
2 | now=$2
3 | spotA=$3
4 | spotB=$4
5 | spotC=$5
6 | spotD=$6
7 | 
8 | 
9 | echo "\n----- SPOT CHECKS ON [ $now ] -----\n " >> "$spot_log"
10 | echo "\tSpot Check A => $spotA\n" >> "$spot_log"
11 | echo "\tSpot Check B => $spotB\n" >> "$spot_log"
12 | echo "\tSpot Check C => $spotC\n" >> "$spot_log"
13 | echo "\tSpot Check D => $spotD\n" >> "$spot_log"
14 | 
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "event-scripts",
3 |   "version": "1.0.0",
4 |   "description": "Node JS event scripts for Templater for Adobe After Effects",
5 |   "main": "index.js",
6 |   "scripts": {
7 |     "test": "echo \"Error: no test specified\" && exit 1"
8 |   },
9 |   "repository": {
10 |     "type": "git",
11 |     "url": "git+https://ariestav@github.com/dataclay/event-scripts.git"
12 |   },
13 |   "keywords": [
14 |     "dataclay",
15 |     "after",
16 |     "effects",
17 |     "adobe",
18 |     "variable",
19 |     "video",
20 |     "dynamic",
21 |     "video",
22 |     "video",
23 |     "data"
24 |   ],
25 |   "author": "Arie Stavchansky",
26 |   "license": "MIT",
27 |   "bugs": {
28 |     "url": "https://github.com/dataclay/event-scripts/issues"
29 |   },
30 |   "homepage": "https://github.com/dataclay/event-scripts#readme",
31 |   "dependencies": {
32 |     "archiver": "^2.1.1",
33 |     "async": "^2.6.4",
34 |     "aws-sdk": "^2.814.0",
35 |     "axios": "^0.21.2",
36 |     "bug-killer": "^4.4.4",
37 |     "extend": "^3.0.2",
38 |     "ffmpeg-on-progress": "^1.0.0",
39 |     "fluent-ffmpeg": "^2.0.1",
40 |     "fs-extra": "^6.0.1",
41 |     "glob": "^7.1.3",
42 |     "google-spreadsheet": "^2.0.7",
43 |     "googleapis": "^39.1.0",
44 |     "https-proxy-agent": "^2.2.4",
45 |     "jwplatform-api": "git+https://github.com/dataclay/jwplatform-api.git",
46 |     "minimist": "^1.2.6",
47 |     "moment": "^2.22.2",
48 |     "node-emoji": "^1.5.1",
49 |     "node-uuid": "^1.4.7",
50 |     "npm": "^9.4.1",
51 |     "open": "6.0.0",
52 |     "opn": "^5.3.0",
53 |     "pad": "^2.1.0",
54 |     "pretty-bytes": "^5.1.0",
55 |     "q": "^1.4.1",
56 |     "r-json": "^1.2.8",
57 |     "readline-sync": "^1.4.9",
58 |     "sheetrock": "^1.1.4",
59 |     "single-line-log": "^1.1.2",
60 |     "sprintf-js": "^1.1.1",
61 |     "uuid": "^3.3.2",
62 |     "vimeo": "^2.1.0",
63 |     "winston": "^3.1.0",
64 |     "write-json": "^3.0.1",
65 |     "youtube-api": "^2.0.10"
66 |   },
67 |   "devDependencies": {
68 |     "madge": "^4.0.1"
69 |   }
70 | }
71 | 
--------------------------------------------------------------------------------