├── NodeJS
│   ├── deploy-output
│   │   ├── credentials
│   │   │   ├── aws-credentials.json
│   │   │   ├── jwplatform-credentials.json
│   │   │   ├── templater-uploader-vimeo.json
│   │   │   ├── templater-uploader-youtube.json
│   │   │   └── google-service-account-creds.json
│   │   ├── logger.js
│   │   ├── aws.js
│   │   ├── youtube.js
│   │   ├── api.js
│   │   ├── jwplatform.js
│   │   ├── stream.js
│   │   ├── constants.js
│   │   ├── app.js
│   │   ├── deploy.js
│   │   ├── gsheet.js
│   │   ├── publish.js
│   │   ├── README.md
│   │   ├── config.js
│   │   └── vimeo.js
│   ├── watch-logs.bat
│   ├── post_job.js
│   ├── concatenate.js
│   ├── log-footage-processing.js
│   ├── log-footage-download.js
│   └── package-sequence.js
├── .editorconfig
├── macOS
│   ├── spot-logger.sh
│   ├── post-batch.sh
│   ├── on-bot-disable.sh
│   ├── post-job.sh
│   ├── on-bot-disable.php
│   └── event-logger.sh
├── .gitignore
├── LICENSE
├── Windows
│   ├── post-batch.bat
│   ├── on-bot-disable.bat
│   ├── post-job.bat
│   ├── on-bot-disable-win.php
│   └── event-logger.bat
├── package.json
└── ExtendScript
    ├── adjust-target-workarea.jsx
    ├── truncate-long-string.jsx
    ├── font-swapping.jsx
    └── event-logger.jsx

/NodeJS/deploy-output/credentials/aws-credentials.json:
--------------------------------------------------------------------------------
1 | {
2 |     "accessKeyID"     : ""
3 |   , "secretAccessKey" : ""
4 | }
--------------------------------------------------------------------------------
/NodeJS/watch-logs.bat:
--------------------------------------------------------------------------------
1 | start cmd /k "bash -c 'tail -f /z/Dev/event-scripts/NodeJS/transcode_out.log'"
2 | start cmd /k "bash -c 'tail -f /z/Dev/event-scripts/NodeJS/deploy-output/deploy-out.log'"
--------------------------------------------------------------------------------
/NodeJS/deploy-output/credentials/jwplatform-credentials.json:
--------------------------------------------------------------------------------
1 | {
2 |     "user"   : ""
3 |   , "key"    : ""
4 |   , "secret" : ""
5 | }
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # This file is for unifying the coding style for different editors and IDEs
2 | # editorconfig.org
3 | 
4 | root = true
5 | 
6 | [*]
7 | end_of_line = lf
8 | charset = utf-8
9 | trim_trailing_whitespace = true
10 | insert_final_newline = true
11 | indent_style = space
12 | indent_size = 4
--------------------------------------------------------------------------------
/macOS/spot-logger.sh:
--------------------------------------------------------------------------------
1 | spot_log=$1/spots.log
2 | now=$2
3 | spotA=$3
4 | spotB=$4
5 | spotC=$5
6 | spotD=$6
7 | 
8 | 
9 | echo "\n----- SPOT CHECKS ON [ $now ] -----\n " >> $spot_log
10 | echo "\tSpot Check A => $spotA\n" >> $spot_log
11 | echo "\tSpot Check B => $spotB\n" >> $spot_log
12 | echo "\tSpot Check C => $spotC\n" >> $spot_log
13 | echo "\tSpot Check D => $spotD\n" >> $spot_log
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | node_modules
3 | *.monopic
4 | /.vscode/launch.json
5 | /ExtendScript/launch.json
6 | /NodeJS/deploy-output/deploy-err.log
7 | /NodeJS/deploy-output/deploy-out.log
8 | /NodeJS/transcode_err.log
9 | /NodeJS/transcode_out.log
10 | /NodeJS/deploy-output/test-cmd.txt
11 | /NodeJS/deploy-output/registered-events.txt
12 | /NodeJS/deploy-output/dependencies.js
13 | /Tests
14 | 
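Note: the JSON files under deploy-output/credentials are empty stubs that the
deploy scripts expect you to fill in. As a minimal illustrative sketch only
(the shipped code actually resolves these values through deploy-output/config.js,
which is not reproduced in this dump), a filled-in aws-credentials.json could be
handed to the AWS SDK like this; the direct file read below is hypothetical:

    var fs  = require('fs'),
        AWS = require('aws-sdk');

    //Hypothetical direct read of the stub; config.js wires this differently
    var creds = JSON.parse(fs.readFileSync(
            __dirname + '/deploy-output/credentials/aws-credentials.json', 'utf8'));

    AWS.config.update({
        accessKeyId     : creds.accessKeyID,    //note the stub's capitalization
        secretAccessKey : creds.secretAccessKey
    });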
--------------------------------------------------------------------------------
/NodeJS/deploy-output/credentials/templater-uploader-vimeo.json:
--------------------------------------------------------------------------------
1 | {
2 |     "user"             : ""
3 |   , "client_id"        : ""
4 |   , "client_secret"    : ""
5 |   , "auth_url"         : "https://api.vimeo.com/oauth/authorize"
6 |   , "access_token_url" : "https://api.vimeo.com/oauth/access_token"
7 |   , "redirect_url"     : ""
8 | }
--------------------------------------------------------------------------------
/NodeJS/deploy-output/credentials/templater-uploader-youtube.json:
--------------------------------------------------------------------------------
1 | //Retrieve this file from the Google API console to allow this app to communicate with your YouTube account.
2 | //Start the process by visiting https://console.cloud.google.com/apis/dashboard
3 | //Choose the "YouTube Data API v3", create a project and download this file.
4 | {
5 |     "installed":{
6 |         "client_id":"",
7 |         "project_id":"",
8 |         "auth_uri":"https://accounts.google.com/o/oauth2/auth",
9 |         "token_uri":"https://accounts.google.com/o/oauth2/token",
10 |         "auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs",
11 |         "client_secret":"",
12 |         "redirect_uris":[""]
13 |     }
14 | }
--------------------------------------------------------------------------------
/macOS/post-batch.sh:
--------------------------------------------------------------------------------
1 | # Sample bash script for Post-Batch Bot Event
2 | # If you enable 'For all commands, use job details as arguments'
3 | # some details about the just-finished batch will be appended to the
4 | # command as arguments.
5 | #
6 | # Argument order is as follows for render operations after each batch completes
7 | #
8 | # $1 => Absolute path to the JSON file containing jobs in most recently completed batch.
9 | # $2 => Absolute path to the processed AE project file.
10 | # $3 => Absolute path to the folder containing the processed AE project file.
11 | # $4 => Absolute path to the root of the specified output location.
12 | #
13 | # Provided for your personal or commercial use by Dataclay, LLC.
14 | 
15 | log="$3/post-batch.log"
16 | echo "-------- [TEMPLATER BATCH] --------" >> "$log"
17 | echo "" >> "$log"
18 | echo "   Batch completed on $(date)" >> "$log"
19 | echo "" >> "$log"
20 | echo "   Batch details as JSON are found at $1" >> "$log"
21 | echo "" >> "$log"
22 | echo "   Output files in batch operation exist in $4" >> "$log"
23 | echo "" >> "$log"
24 | 
--------------------------------------------------------------------------------
/NodeJS/deploy-output/credentials/google-service-account-creds.json:
--------------------------------------------------------------------------------
1 | //Retrieve this file from the Google API console to allow this app to communicate with your sheets. Create a service account to retrieve it.
2 | //Start the process by visiting https://console.cloud.google.com/apis/dashboard
3 | //Choose the "Google Sheets API", create a project with a service account and download your file
4 | {
5 |   "type": "service_account",
6 |   "project_id": "",
7 |   "private_key_id": "",
8 |   "private_key": "",
9 |   "client_email": "",
10 |   "client_id": "",
11 |   "auth_uri": "https://accounts.google.com/o/oauth2/auth",
12 |   "token_uri": "https://accounts.google.com/o/oauth2/token",
13 |   "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
14 |   "client_x509_cert_url": ""
15 | }
16 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 | 
3 | Copyright (c) 2015 Dataclay
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/Windows/post-batch.bat:
--------------------------------------------------------------------------------
1 | :: Sample batch script for Post-Batch Bot Event
2 | :: If you enable 'For all commands, use job details as arguments'
3 | :: some details about the just-finished batch will be appended to the
4 | :: command as arguments.
5 | ::
6 | :: Argument order is as follows for render operations after each batch completes
7 | :: %1 => Absolute path to the JSON file containing jobs in most recently completed batch.
8 | :: %2 => Absolute path to the processed AE project file.
9 | :: %3 => Absolute path to the folder containing the processed AE project file.
10 | :: %4 => Absolute path to the root of the specified output location.
11 | ::
12 | :: Provided for your personal or commercial use by Dataclay, LLC
13 | 
14 | @ECHO ON
15 | 
16 | SET log=%3\post-batch.log
17 | For /f "tokens=2-4 delims=/ " %%a in ('date /t') do (set mydate=%%c-%%a-%%b)
18 | For /f "tokens=1-2 delims=/:" %%a in ('time /t') do (set mytime=%%a%%b)
19 | 
20 | echo -------- [TEMPLATER BATCH] -------- >> %log%
21 | echo Batch completed on %mydate% @ %mytime% >> %log%
22 | echo Batch details as JSON are found in file %1 >> %log%
23 | echo Output files in batch operation exist in %4 >> %log%
24 | 
--------------------------------------------------------------------------------
/macOS/on-bot-disable.sh:
--------------------------------------------------------------------------------
1 | # Sample bash script for Bot-Shutdown Event
2 | # If you enable 'For all commands, use job details as arguments'
3 | # some details about the just-finished batch will be appended to the
4 | # command as arguments. On Windows, there was no easy way to call
5 | # the php script directly, so we call php via a batch file. Furthermore,
6 | # we had to provide an absolute path to the php executable even though
7 | # php.exe is in the environment path.
8 | #
9 | # The order of the arguments is as follows:
10 | #
11 | # $1 => The given name of The Bot as found in Templater's Preferences dialog
12 | # $2 => Absolute path to the AE project file being processed at the time of disable
13 | # $3 => Absolute path to the folder containing the AE project file being processed
14 | #
15 | # Provided for your personal or commercial use by Dataclay, LLC
16 | 
17 | log="$3/templater-bot.log"
18 | echo "-------- [TEMPLATER BOT] --------" >> "$log"
19 | echo "" >> "$log"
20 | echo "   The bot went down at $(date)" >> "$log"
21 | echo "   Sending email notice" >> "$log"
22 | /usr/local/opt/php55/bin/php -f "/Users/arie/Dev/Templater/Scripts/on-bot-disable.php" -- "$1" "$2" "$3"
23 | echo "   Done sending email notice" >> "$log"
24 | echo "" >> "$log"
25 | 
26 | 
--------------------------------------------------------------------------------
/macOS/post-job.sh:
--------------------------------------------------------------------------------
1 | #
2 | # Sample bash script for Post-Job Bot Event
3 | # If you enable 'For all commands, use job details as arguments'
4 | # some details about the just-finished job will be appended to the
5 | # command as arguments.
6 | #
7 | # Argument order is as follows for render operations after each job completes
8 | # $1 => The row index in the spreadsheet. This is always `null` when Bot is enabled.
9 | # $2 => The value of the job's ID column if it has one, `null` if no ID value.
10 | # $3 => The value of the job's devised output name.
11 | # $4 => Absolute path to the final rendered file if it was rendered.
12 | # $5 => Absolute path to the folder containing the rendered file.
13 | # $6 => Absolute path to the processed AE project file.
14 | # $7 => Absolute path to the folder containing the processed AE project file.
15 | # $8 => Absolute path to a .json file containing all of the job's column values
16 | #
17 | # Provided for your personal or commercial use by Dataclay, LLC.
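#
# For example, with that option enabled, the command Templater runs after a
# job might expand to something like this (every value below is hypothetical):
#
#    /path/to/event-scripts/macOS/post-job.sh null job-42 promo_final \
#        "/renders/promo_final.mov" "/renders" \
#        "/projects/promo.aep" "/projects" "/projects/promo_final.json"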
18 | 
19 | log="$7/post-job.log"
20 | echo "-------- [TEMPLATER JOB] --------" >> "$log"
21 | echo "" >> "$log"
22 | echo "   Job completed on $(date)" >> "$log"
23 | echo "" >> "$log"
24 | echo "   Rendered job with ID \`$2\` to $4" >> "$log"
25 | echo "" >> "$log"
26 | echo "   Job details as JSON are found in file $8" >> "$log"
27 | echo "" >> "$log"
28 | 
--------------------------------------------------------------------------------
/Windows/on-bot-disable.bat:
--------------------------------------------------------------------------------
1 | :: Sample batch script for Bot-Shutdown Event
2 | :: If you enable 'For all commands, use job details as arguments'
3 | :: some details about the just-finished batch will be appended to the
4 | :: command as arguments. On Windows, there was no easy way to call
5 | :: the php script directly, so we call php via a batch file. Furthermore,
6 | :: we had to provide an absolute path to the php executable even though
7 | :: php.exe is in the environment path.
8 | ::
9 | :: The order of the arguments is as follows:
10 | ::
11 | :: %1 => The given name of The Bot as found in Templater's Preferences dialog
12 | :: %2 => Absolute path to the AE project file being processed at the time of disable
13 | :: %3 => Absolute path to the folder containing the AE project file being processed
14 | ::
15 | :: Provided for your personal or commercial use by Dataclay, LLC
16 | 
17 | @ECHO OFF
18 | 
19 | SET log=%3\templater-bot.log
20 | For /f "tokens=2-4 delims=/ " %%a in ('date /t') do (set mydate=%%c-%%a-%%b)
21 | For /f "tokens=1-2 delims=/:" %%a in ('time /t') do (set mytime=%%a%%b)
22 | 
23 | echo -------- [TEMPLATER BOT] -------- >> %log%
24 | echo The bot went down on %mydate% @ %mytime% >> %log%
25 | echo Sending email notice >> %log%
26 | "C:\Program Files (x86)\PHP\php.exe" "L:\event-scripts\Windows\on-bot-disable-win.php" %1 %2 %3
27 | echo Done sending email notice >> %log%
28 | 
29 | 
30 | 
--------------------------------------------------------------------------------
/Windows/post-job.bat:
--------------------------------------------------------------------------------
1 | :: Sample batch script for Post-Job Bot Event
2 | :: If you enable 'For all commands, use job details as arguments'
3 | :: some details about the just-finished job will be appended to the
4 | :: command as arguments.
5 | ::
6 | :: Argument order is as follows for render operations after each job completes
7 | :: %1 => The row index in the spreadsheet. This is always `null` when Bot is enabled.
8 | :: %2 => The value of the job's ID column if it has one, `null` if no ID value.
9 | :: %3 => The value of the job's devised output name.
10 | :: %4 => Absolute path to the final rendered file if it was rendered.
11 | :: %5 => Absolute path to the folder containing the rendered file.
12 | :: %6 => Absolute path to the processed AE project file.
13 | :: %7 => Absolute path to the folder containing the processed AE project file.
14 | :: %8 => Absolute path to a .json file containing all of the job's column values
15 | ::
16 | :: Provided for your personal or commercial use by Dataclay, LLC
17 | 
18 | @ECHO OFF
19 | 
20 | SET log=%7\post-job.log
21 | For /f "tokens=2-4 delims=/ " %%a in ('date /t') do (set mydate=%%c-%%a-%%b)
22 | For /f "tokens=1-2 delims=/:" %%a in ('time /t') do (set mytime=%%a%%b)
23 | 
24 | echo -------- [TEMPLATER JOB] -------- >> %log%
25 | echo Job completed on %mydate% @ %mytime% >> %log%
26 | echo Rendered job with ID %2 to %4 >> %log%
27 | echo Job details as JSON are found in file %8 >> %log%
28 | echo( >> %log%
--------------------------------------------------------------------------------
/NodeJS/deploy-output/logger.js:
--------------------------------------------------------------------------------
1 | var winston = require('winston'),
2 |     moment  = require('moment');
3 | 
4 | const { format } = require('winston');
5 | const logger = winston.createLogger({
6 | 
7 |     transports : [
8 | 
9 |         new winston.transports.Console({
10 |             prettyPrint : true
11 |           , format      : format.combine(
12 |                 format.splat(),
13 |                 format.simple(),
14 |                 format.printf(info => `${info.message}`)
15 |             )
16 |         }),
17 | 
18 |         new winston.transports.File({
19 |             filename    : `${__dirname}/deploy-out.log`
20 |           , level       : 'info'
21 |           , prettyPrint : true
22 |           , format      : format.combine(
23 |                 format.splat(),
24 |                 format.simple(),
25 |                 format.printf(info => `${info.message}`)
26 |             )
27 |         }),
28 | 
29 |         new winston.transports.File({
30 |             filename    : `${__dirname}/deploy-err.log`
31 |           , level       : 'error'
32 |           , prettyPrint : true
33 |           , format      : format.combine(
34 |                 format.splat(),
35 |                 format.simple(),
36 |                 format.printf(info => `${"\n----- Reported on [ " + moment().format('MMMM Do YYYY, h:mm:ss A') + " ] -----------------------\n\n"} \t${info.message}`)
37 |             )
38 |         })
39 | 
40 |     ]
41 | 
42 | })
43 | 
44 | module.exports = logger;
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "event-scripts",
3 |   "version": "1.0.0",
4 |   "description": "Node JS event scripts for Templater for Adobe After Effects",
5 |   "main": "index.js",
6 |   "scripts": {
7 |     "test": "echo \"Error: no test specified\" && exit 1"
8 |   },
9 |   "repository": {
10 |     "type": "git",
11 |     "url": "git+https://ariestav@github.com/dataclay/event-scripts.git"
12 |   },
13 |   "keywords": [
14 |     "dataclay",
15 |     "after",
16 |     "effects",
17 |     "adobe",
18 |     "variable",
19 |     "video",
20 |     "dynamic",
21 |     "data"
22 |   ],
23 |   "author": "Arie Stavchansky",
24 |   "license": "MIT",
25 |   "bugs": {
26 |     "url": "https://github.com/dataclay/event-scripts/issues"
27 |   },
28 |   "homepage": "https://github.com/dataclay/event-scripts#readme",
29 |   "dependencies": {
30 |     "archiver": "^2.1.1",
31 |     "async": "^2.6.4",
32 |     "aws-sdk": "^2.814.0",
33 |     "axios": "^0.21.2",
34 |     "bug-killer": "^4.4.4",
35 |     "extend": "^3.0.2",
36 |     "ffmpeg-on-progress": "^1.0.0",
37 |     "fluent-ffmpeg": "^2.0.1",
38 |     "fs-extra": "^6.0.1",
39 |     "glob": "^7.1.3",
40 |     "google-spreadsheet": "^2.0.7",
41 |     "googleapis": "^39.1.0",
42 |     "https-proxy-agent": "^2.2.4",
43 |     "jwplatform-api": "git+https://github.com/dataclay/jwplatform-api.git",
44 |     "minimist": "^1.2.6",
45 |     "moment": "^2.22.2",
46 |     "node-emoji": "^1.5.1",
47 |     "node-uuid": "^1.4.7",
48 |     "npm": "^9.4.1",
49 |     "open": "6.0.0",
50 |     "opn": "^5.3.0",
51 |     "pad": "^2.1.0",
52 |     "pretty-bytes": "^5.1.0",
53 |     "q": "^1.4.1",
54 |     "r-json": "^1.2.8",
55 |     "readline-sync": "^1.4.9",
56 |     "sheetrock": "^1.1.4",
57 |     "single-line-log": "^1.1.2",
58 |     "sprintf-js": "^1.1.1",
59 |     "uuid": "^3.3.2",
60 |     "vimeo": "^2.1.0",
61 |     "winston": "^3.1.0",
62 |     "write-json": "^3.0.1",
63 |     "youtube-api": "^2.0.10"
64 |   },
65 |   "devDependencies": {
66 |     "madge": "^4.0.1"
67 |   }
68 | }
69 | 
--------------------------------------------------------------------------------
/NodeJS/deploy-output/aws.js:
--------------------------------------------------------------------------------
1 | var log    = require('./logger'),
2 |     async  = require('async'),
3 |     fs     = require('fs'),
4 |     AWS    = require('aws-sdk'),
5 |     enums  = require('./constants'),
6 |     emoji  = require('node-emoji'),
7 |     config = require('./config'),
8 |     path   = require('path');
9 | 
10 | var aws = {
11 | 
12 |     asset : null,
13 | 
14 |     S3_URL : {
15 | 
16 |         video   : null
17 |       , poster  : null
18 |       , preview : null
19 | 
20 |     },
21 | 
22 |     config : function(step) {
23 | 
24 |         log.info("\n\t\tSetting AWS Credentials");
25 | 
26 |         AWS.config.update({
27 |             accessKeyId      : config.params.storage.accessID   //enums.aws.accessKeyId
28 |           , secretAccessKey  : config.params.storage.secret     //enums.aws.secretAccessKey
29 |           , signatureVersion : enums.aws.signatureVersion
30 |         });
31 | 
32 |         step();
33 | 
34 |     },
35 | 
36 |     put_obj : function(b64, step) {
37 | 
38 |         var p  = config.params,
39 |             s3 = new AWS.S3();
40 | 
41 |         if (!(p.storage.accessID) ||
42 |             !(p.storage.secret)   ||
43 |             !(enums.aws.signatureVersion)) {
44 | 
45 |             log.info("\n\t\t... simulation upload ...");
46 |             step();
47 | 
48 |         } else {
49 | 
50 |             log.info("\n\t\t%s\tPutting [ %s ] into [ %s ] bucket within [ %s ] folder"
51 |                 , emoji.get('package')
52 |                 , path.parse(aws.asset).base
53 |                 , p.storage.bucket
54 |                 , p.storage.folder);
55 | 
56 |             s3.putObject({
57 |                 Bucket : p.storage.bucket
58 |               , Key    : (p.storage.folder + "/" + path.parse(aws.asset).base)
59 |               , Body   : b64
60 |               , ACL    : 'public-read'
61 |             }, (err, data) => {
62 | 
63 |                 if (err) {
64 |                     log.error(err.message);
65 |                     throw err;
66 |                 }
67 | 
68 |                 step();
69 | 
70 |             });
71 | 
72 |         }
73 | 
74 |     },
75 | 
76 |     download_url : function(file) {
77 | 
78 |         let p = config.params;
79 |         return 'https://' + p.storage.bucket + '.' + enums.aws.s3_default_url + '/' + p.storage.folder + '/' + path.parse(file).base;
80 | 
81 |     }
82 | 
83 | }
84 | 
85 | module.exports = aws;
--------------------------------------------------------------------------------
/Windows/on-bot-disable-win.php:
--------------------------------------------------------------------------------
1 | <?php
2 | /**
3 |  * Sample PHP script for Bot-Shutdown Event
4 |  * If you enable 'For all commands, use job details as arguments'
5 |  * some details about the just-finished batch will be appended to the
6 |  * command as arguments. On Windows, there was no easy way to call
7 |  * the php script directly, so on-bot-disable.bat calls this script
8 |  * through the php executable's absolute path.
9 |  *
10 |  * The order of the arguments is as follows:
11 |  *
12 |  * $argv[1] => The given name of The Bot as found in Templater's Preferences dialog
13 |  * $argv[2] => Absolute path to the AE project file being processed at the time of disable
14 |  * $argv[3] => Absolute path to the folder containing the AE project file being processed
15 |  *
16 |  * Provided for your personal or commercial use by Dataclay, LLC.
17 |  *
18 |  * NOTE: THIS IS ONLY SAMPLE CODE. IT WILL LIKELY NOT WORK IN YOUR ENVIRONMENT.
19 |  * THE SCRIPT IS INCLUDING OTHER SCRIPTS NOT PRESENT IN THIS CODE.
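 *
 * (The smtp.conf.php file included below is assumed to define the
 * $dclay_smtp_address, $dclay_smtp_port, $dclay_smtp_password, and
 * $userEmail values that configure the SwiftMailer transport.)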
20 |  */
21 | 
22 | include_once('smtp.conf.php');
23 | require_once 'vendor/swiftmailer/swiftmailer/lib/swift_required.php';
24 | 
25 | $senderName     = "Dataclay Information";
26 | $senderEmail    = 'info@dataclay.io';
27 | $recipientEmail = 'support@dataclay.io';
28 | $recipientName  = 'Dataclay Support';
29 | $subject        = "The Bot for Templater is now disabled";
30 | 
31 | $datetime = date('Y-m-d H:i:s');
32 | 
33 | $senderMsg = "The Bot for Templater was disabled at " . $datetime . "\n\n[Bot Name]\n" . $argv[1] . "\n\n[AE Project File]\n" . $argv[2] . "\n\n[AE Project Folder]\n" . $argv[3] . "\n\nThis could have happened manually or because of an error.\n\nRegards,\nDataclay Support";
34 | 
35 | $transport = Swift_SmtpTransport::newInstance($dclay_smtp_address, $dclay_smtp_port, 'ssl')
36 |     ->setUsername($userEmail)
37 |     ->setPassword($dclay_smtp_password);
38 | 
39 | $mailer = Swift_Mailer::newInstance($transport);
40 | 
41 | $message = Swift_Message::newInstance()
42 |     ->setSubject($subject)
43 |     ->setFrom(array($senderEmail => $senderName))
44 |     ->setTo(array($recipientEmail => $recipientName))
45 |     ->setBody($senderMsg);
46 | 
47 | $result = $mailer->send($message);
48 | 
49 | ?>
50 | 
--------------------------------------------------------------------------------
/macOS/on-bot-disable.php:
--------------------------------------------------------------------------------
1 | #!/usr/local/opt/php55/bin/php
2 | <?php
3 | /**
4 |  * Sample PHP script for Bot-Shutdown Event
5 |  * If you enable 'For all commands, use job details as arguments'
6 |  * some details about the just-finished batch will be appended to the
7 |  * command as arguments. This is the macOS counterpart of
8 |  * on-bot-disable-win.php; the on-bot-disable.sh shell script can
9 |  * invoke this php script directly, unlike its Windows equivalent
10 |  * which must go through a batch file.
11 |  *
12 |  * The order of the arguments is as follows:
13 |  *
14 |  * $argv[1] => The given name of The Bot as found in Templater's Preferences dialog
15 |  * $argv[2] => Absolute path to the AE project file being processed at the time of disable
16 |  * $argv[3] => Absolute path to the folder containing the AE project file being processed
17 |  *
18 |  * Provided to you for your personal or commercial use by Dataclay, LLC.
19 |  *
20 |  * NOTE: THIS IS ONLY SAMPLE CODE. IT WILL LIKELY NOT WORK IN YOUR ENVIRONMENT.
21 |  * THIS SCRIPT IS INCLUDING OTHER SCRIPTS NOT PRESENT IN THIS CODE.
22 |  */
23 | 
24 | include_once('smtp.conf.php');
25 | require_once 'vendor/swiftmailer/swiftmailer/lib/swift_required.php';
26 | 
27 | $senderName     = "Dataclay Information";
28 | $senderEmail    = 'info@dataclay.io';
29 | $recipientEmail = 'support@dataclay.io';
30 | $recipientName  = 'Dataclay Support';
31 | $subject        = "The Bot for Templater is now disabled";
32 | 
33 | $datetime = date('Y-m-d H:i:s');
34 | 
35 | $senderMsg = "The Bot for Templater was disabled at " . $datetime . "\n\n[Bot Name]\n" . $argv[1] . "\n\n[AE Project File]\n" . $argv[2] . "\n\n[AE Project Folder]\n" . $argv[3] . "\n\nThis could have happened manually or because of an error.\n\nRegards,\nDataclay Support";
"\n\nThis could have happened manually or because of an error.\n\nRegards,\nDataclay Support"; 36 | 37 | $transport = Swift_SmtpTransport::newInstance($dclay_smtp_address, $dclay_smtp_port, 'ssl') 38 | ->setUsername($userEmail) 39 | ->setPassword($dclay_smtp_password); 40 | 41 | $mailer = Swift_Mailer::newInstance($transport); 42 | 43 | $message = Swift_Message::newInstance() 44 | ->setSubject($subject) 45 | ->setFrom(array($senderEmail => $senderName)) 46 | ->setTo(array($recipientEmail => $recipientName)) 47 | ->setBody($senderMsg); 48 | 49 | $result = $mailer->send($message); 50 | 51 | ?> 52 | -------------------------------------------------------------------------------- /NodeJS/post_job.js: -------------------------------------------------------------------------------- 1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * 2 | 3 | Log each Templater versioning job to a file 4 | Copyright (c) Dataclay LLC 2016 5 | MIT License 6 | 7 | You must enter `npm install` to install all dependency modules used in 8 | this script. All modules are listed in the package.json file in the 9 | root of this repository. 10 | 11 | To log information about the most recently completed job, enter the 12 | following command within the "After each job" field found within the 13 | Templater Preferences dialog. If using the Templater CLI, enter it 14 | into the "post_cmd_job" property found within the 15 | templater-options.json file. 16 | 17 | node /path/to/event-scripts/NodeJS/post_job.js --outdir $out_dir --aefile $aep --data $data_job --aedir $aep_dir --outfile $out_file -- $title 18 | 19 | NOTE: The "-- $title" part of the command assumes that your data 20 | source has a column header, or property key, named "title" 21 | 22 | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ 23 | 24 | //Required NodeJS Modules 25 | var os = require('os'), 26 | fs = require('fs'), 27 | fse = require('fs-extra'), 28 | util = require('util'), 29 | path = require('path'), 30 | moment = require('moment'), 31 | argv = require('minimist')(process.argv.slice(2)); 32 | 33 | var logfile = "templater-post-job.log", 34 | job_data = require(argv.data); 35 | proj = path.resolve(argv.aedir), 36 | log = path.join(os.tmpdir(), logfile), 37 | log_dest = path.join(proj, "templater-post-job.log"), 38 | msg = msg = "\r\n------------ [TEMPLATER JOB] -----------\r\n"; 39 | 40 | 41 | //Design the output for the post job log 42 | msg += "\r\nJob completed processing on\r\n > " + moment().format('MMMM Do YYYY, h:mm:ss a'); 43 | msg += "\r\n\r\nFinished processing project\r\n > " + argv.aefile; 44 | msg += "\r\n\r\nProject directory\r\n > " + proj; 45 | msg += "\r\n\r\nOutput directory\r\n > " + argv.outdir; 46 | msg += "\r\n\r\nOutput asset\r\n > " + argv.outfile; 47 | msg += "\r\n\r\nTitle\r\n > " + argv._[0]; 48 | msg += "\r\n\r\nData set\r\n > " + JSON.stringify(job_data, null, 4).replace(/(?:\r\n|\r|\n)/g, "\r\n "); 49 | msg += "\r\n\r\n" 50 | 51 | 52 | //Append to log and copy log to project directory. 53 | //NOTE: On Windows, NodeJS cannot append to files that 54 | // exist on a mapped network drive. 
55 | //      append a local file in the temp directory, then
56 | //      copy it to the project directory
57 | 
58 | try {
59 |     fs.appendFileSync(log, msg, 'utf8');
60 |     fse.copySync(log, log_dest);
61 | } catch (err) {
62 |     console.error(err.message);
63 |     fs.appendFileSync(log, "\r\nError : " + err.message);
64 | }
65 | 
--------------------------------------------------------------------------------
/NodeJS/concatenate.js:
--------------------------------------------------------------------------------
1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 | 
3 |    Concatenate Templater batch output with ffmpeg.
4 |    Copyright (c) Dataclay LLC 2016
5 |    MIT License
6 | 
7 |    You must enter `npm install` to install all dependency modules used in
8 |    this script. All modules are listed in the package.json file in the
9 |    root of this repository.
10 | 
11 |    To use this script, make sure ffmpeg is installed and point to
12 |    both the ffmpeg and ffprobe binaries in the code below. Then, make
13 |    sure that the script's dependencies are installed by entering `npm
14 |    install` in the root of your working directory.
15 | 
16 |    Enter the following command within the "After all jobs" field found
17 |    within the Templater Preferences dialog. If using the Templater CLI,
18 |    enter the following command in the "post_cmd_batch" property found
19 |    within the templater-options.json file.
20 | 
21 |    node /path/to/event-scripts/NodeJS/concatenate.js --details $data_batch --outdir $out_dir --outname "finalrender.mov"
22 | 
23 |  * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
24 | 
25 | //Constants
26 | var ffmpeg_win  = "C:\\Program Files\\ffmpeg\\bin\\ffmpeg.exe",
27 |     ffprobe_win = "C:\\Program Files\\ffmpeg\\bin\\ffprobe.exe",
28 |     ffmpeg_osx  = "/usr/local/bin/ffmpeg",
29 |     ffprobe_osx = "/usr/local/bin/ffprobe";
30 | 
31 | //Required Node Modules
32 | var os     = require('os'),
33 |     path   = require('path'),
34 |     fs     = require('fs'),
35 |     glob   = require('glob'),
36 |     ffmpeg = require('fluent-ffmpeg'),
37 |     argv   = require('minimist')(process.argv.slice(2));
38 | 
39 | var batch_details_json = require(path.resolve(argv.details)),
40 |     ffmpeg_cmd         = ffmpeg(),
41 |     concat_output      = path.join(argv.outdir, argv.outname);
42 | 
43 | if (process.platform == 'win32') {
44 |     ffmpeg.setFfmpegPath(ffmpeg_win);
45 |     ffmpeg.setFfprobePath(ffprobe_win);
46 | } else {
47 |     ffmpeg.setFfmpegPath(ffmpeg_osx);
48 |     ffmpeg.setFfprobePath(ffprobe_osx);
49 | }
50 | 
51 | console.log("\n\nGathering scenes and sequencing for movie");
52 | for (var i=0; i < batch_details_json.length; i++) {
53 | 
54 |     var input_file = glob.sync(batch_details_json[i]["output_asset"] + ".*", { })[0];
55 |     console.log("\n\tscene " + (i + 1));
56 |     console.log("\t" + input_file);
57 | 
58 |     ffmpeg_cmd.input(path.resolve(input_file));
59 | 
60 | }
61 | 
62 | ffmpeg_cmd.on('start', (command) => {
63 |     console.log('\n\nStarting concatenation of output:\n\n\t' + command);
64 | });
65 | 
66 | ffmpeg_cmd.on('error', (err, stdout, stderr) => {
67 |     console.log("\nError: " + err.message);
68 |     console.log(err.stack);
69 | });
70 | 
71 | ffmpeg_cmd.on('end', (stdout, err) => {
72 |     console.log("\n\nFinal movie");
73 |     console.log("\n\t" + concat_output);
74 | });
75 | 
76 | ffmpeg_cmd.videoCodec('libx264')
77 |           .outputOption('-pix_fmt yuv420p')
78 |           .noAudio();
79 | 
80 | ffmpeg_cmd.mergeToFile(concat_output);
81 | 
--------------------------------------------------------------------------------
/NodeJS/deploy-output/youtube.js:
--------------------------------------------------------------------------------
1 | var enums    = require('./constants'),
2 |     config   = require('./config'),
3 |     moment   = require('moment'),
4 |     ytLogger = require("bug-killer"),
5 |     fs       = require("fs"),
6 |     readJson = require("r-json"),
7 |     rline    = require('readline-sync'),
8 |     //rl = rline.createInterface(process.stdin, process.stdout),
9 |     api      = null;
10 | 
11 | const Youtube     = require("youtube-api"),
12 |       opn         = require("opn"),
13 |       prettyBytes = require("pretty-bytes");
14 | 
15 | var yt = {
16 | 
17 |     get : function(step) {
18 | 
19 |         let oauth = Youtube.authenticate({
20 |             type          : "oauth"
21 |           , client_id     : config.params.video.key
22 |           , client_secret : config.params.video.secret
23 |           , redirect_url  : config.params.auth.yt.creds.installed.redirect_uris[0]
24 |         })
25 | 
26 |         //TODO: Check if there is an existing access token saved to file. If not then open browser and prompt for access code.
27 |         if (!config.params.prefs.oauth.youtube || !config.params.prefs.oauth.youtube.refresh_token) {
28 | 
29 |             opn(oauth.generateAuthUrl({
30 |                 access_type : "offline"
31 |               , scope       : ["https://www.googleapis.com/auth/youtube.upload"]
32 |             }));
33 | 
34 |             var auth_code = rline.question("\n\nFirst grant this application authorization to access your YouTube account.\n\nPlease enter in the authorization code from your browser and press the Enter key\n");
35 | 
36 |             oauth.getToken(auth_code, (err, tokens) => {
37 | 
38 |                 if (err) {
39 |                     throw err;
40 |                 }
41 | 
42 |                 config.params.prefs.oauth.youtube = tokens;
43 |                 config.write_prefs(config.params.prefs);
44 |                 step();
45 | 
46 |             });
47 | 
48 |         } else {
49 | 
50 |             console.log("\nAttempting to retrieve access token using refresh token. Please wait....");
51 |             step();
52 | 
53 |         }
54 | 
55 |     },
56 | 
57 |     video : {
58 | 
59 |         key : null,
60 | 
61 |         create : function(row, step) {
62 | 
63 |             var vid = {
64 |                 download_url : row.s3_url
65 |               , title        : config.params.video.title
66 |               , description  : config.params.video.desc
67 |               , author       : config.params.user.name
68 |               , expires_date : moment().add(1, 'years').unix()
69 |             }
70 | 
71 |             api.post('/v1/videos/create', vid, null, function(err, results){
72 | 
73 |                 if (err) {
74 |                     console.log(err);
75 |                 } else {
76 |                     yt.video.key = results.video.key;
77 |                 }
78 | 
79 |                 step();
80 | 
81 |             });
82 | 
83 |         }
84 | 
85 |     }
86 | 
87 | }
88 | 
89 | module.exports = yt;
--------------------------------------------------------------------------------
/NodeJS/log-footage-processing.js:
--------------------------------------------------------------------------------
1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 | 
3 |    Logs pre and post footage processing in Templater for After Effects
4 |    Copyright (c) Dataclay LLC 2024
5 |    MIT License
6 | 
7 |    You must enter `npm install` to install all dependency modules used in
8 |    this script. All modules are listed in the package.json file in the
9 |    root of this repository.
10 | 
11 |    To log information about footage assets Templater processes, enter the
12 |    following command within the "Before footage processing" and
13 |    "After footage processing" fields found within the Templater Preferences
14 |    dialog. If using the Templater CLI, enter it into the "pre_cmd_ftg"
15 |    and "post_cmd_ftg" properties found within the templater-options.json file.
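
   Illustrative aside: minimist turns each --flag in the command below into
   a property on `argv`, and Templater expands the $-prefixed tokens before
   the command runs, so inside this script the values arrive as plain
   strings:

       var argv = require('minimist')(process.argv.slice(2));
       // argv.event, argv.layer, argv.file, etc. mirror the flags below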
16 | 
17 | 
18 |    /path/to/node /path/to/log-footage-processing.js --event $event --aefile $aep --data $data_job --aedir $aep_dir --layer $ftg_layer --name $ftg_name --file $ftg_file --dir $ftg_dir --ext $ftg_ext
19 | 
20 |  * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
21 | 
22 | //Required NodeJS Modules
23 | var os     = require('os'),
24 |     fs     = require('fs'),
25 |     fse    = require('fs-extra'),
26 |     util   = require('util'),
27 |     path   = require('path'),
28 |     moment = require('moment'),
29 |     argv   = require('minimist')(process.argv.slice(2));
30 | 
31 | var logfile  = "templater-ftg-process.log",
32 |     job_data = require(argv.data),
33 |     proj     = path.resolve(argv.aedir),
34 |     log      = path.join(os.tmpdir(), logfile),
35 |     log_dest = path.join(proj, "templater-ftg-process.log"),
36 |     msg      = "\r\n------------ [TEMPLATER DOWNLOAD] -----------\r\n";
37 | 
38 | 
39 | //Design the output for the post job log
40 | msg += "\r\nTemplater Event [ " + argv.event + " ] on [ " + moment().format('MMMM Do YYYY, h:mm:ss a') + "]";
41 | msg += "\r\n\r\nData set\r\n   " + JSON.stringify(job_data, null, 4).replace(/(?:\r\n|\r|\n)/g, "\r\n   ");
42 | msg += "\r\n\r\nProject directory\r\n   > " + proj;
43 | msg += "\r\n\r\nProject file \r\n   > " + path.basename(argv.aefile);
44 | msg += "\r\n\r\nNew footage layer\r\n   > " + argv.layer;    //$ftg_layer
45 | msg += "\r\n\r\nNew footage file\r\n   > " + argv.file;      //$ftg_file
46 | msg += "\r\n\r\nNew footage name\r\n   > " + argv.name;      //$ftg_name
47 | msg += "\r\n\r\nNew footage dir\r\n   > " + argv.dir;        //$ftg_dir
48 | msg += "\r\n\r\nNew footage extension\r\n   > " + argv.ext;  //$ftg_ext
49 | msg += "\r\n\r\n";
50 | 
51 | 
52 | //Append to log and copy log to project directory.
53 | //NOTE: On Windows, NodeJS cannot append to files that
54 | //      exist on a mapped network drive. First we
55 | //      append a local file in the temp directory, then
56 | //      copy it to the project directory
57 | 
58 | try {
59 |     fs.appendFileSync(log, msg, 'utf8');
60 |     fse.copySync(log, log_dest);
61 | } catch (err) {
62 |     console.error(err.message);
63 |     fs.appendFileSync(log, "\r\nError : " + err.message);
64 | }
65 | 
--------------------------------------------------------------------------------
/NodeJS/log-footage-download.js:
--------------------------------------------------------------------------------
1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 | 
3 |    Logs pre and post footage download in Templater for After Effects
4 |    Copyright (c) Dataclay LLC 2024
5 |    MIT License
6 | 
7 |    You must enter `npm install` to install all dependency modules used in
8 |    this script. All modules are listed in the package.json file in the
9 |    root of this repository.
10 | 
11 |    To log information about footage assets Templater retrieves, enter the
12 |    following command within the "Before footage download" and
13 |    "After footage download" fields found within the Templater Preferences
14 |    dialog. If using the Templater CLI, enter it into the "pre_cmd_dl"
15 |    and "post_cmd_dl" properties found within the templater-options.json file.
16 | 
17 | 
18 |    /path/to/node /path/to/log-footage-download.js --event $event --aefile $aep --data $data_job --aedir $aep_dir --uri $dl_file_uri --name $dl_file_name --dir $dl_file_dir --mime $dl_file_mime --ext $dl_file_ext --file $dl_file
19 | 
20 |  * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
21 | 
22 | //Required NodeJS Modules
23 | var os     = require('os'),
24 |     fs     = require('fs'),
25 |     fse    = require('fs-extra'),
26 |     util   = require('util'),
27 |     path   = require('path'),
28 |     moment = require('moment'),
29 |     argv   = require('minimist')(process.argv.slice(2));
30 | 
31 | var logfile  = "templater-ftg-download.log",
32 |     job_data = require(argv.data),
33 |     proj     = path.resolve(argv.aedir),
34 |     log      = path.join(os.tmpdir(), logfile),
35 |     log_dest = path.join(proj, "templater-ftg-download.log"),
36 |     msg      = "\r\n------------ [TEMPLATER DOWNLOAD] -----------\r\n";
37 | 
38 | 
39 | //Design the output for the post job log
40 | msg += "\r\nTemplater Event [ " + argv.event + " ] on [ " + moment().format('MMMM Do YYYY, h:mm:ss a') + "]";
41 | msg += "\r\n\r\nData set\r\n   " + JSON.stringify(job_data, null, 4).replace(/(?:\r\n|\r|\n)/g, "\r\n   ");
42 | msg += "\r\n\r\nProject directory\r\n   > " + proj;
43 | msg += "\r\n\r\nProject file \r\n   > " + path.basename(argv.aefile);
44 | msg += "\r\n\r\nDownload file URI\r\n   > " + argv.uri;        //$dl_file_uri
45 | msg += "\r\n\r\nDownload file name\r\n   > " + argv.name;      //$dl_file_name
46 | msg += "\r\n\r\nDownload file directory\r\n   > " + argv.dir;  //$dl_file_dir
47 | msg += "\r\n\r\nDownload mime type\r\n   > " + argv.mime;      //$dl_file_mime
48 | msg += "\r\n\r\nDownload file extension\r\n   > " + argv.ext;  //$dl_file_ext
49 | msg += "\r\n\r\nDownload file asset\r\n   > " + argv.file;     //$dl_file
50 | msg += "\r\n\r\n";
51 | 
52 | 
53 | //Append to log and copy log to project directory.
54 | //NOTE: On Windows, NodeJS cannot append to files that
55 | //      exist on a mapped network drive. First we
56 | //      append a local file in the temp directory, then
57 | //      copy it to the project directory
58 | 
59 | try {
60 |     fs.appendFileSync(log, msg, 'utf8');
61 |     fse.copySync(log, log_dest);
62 | } catch (err) {
63 |     console.error(err.message);
64 |     fs.appendFileSync(log, "\r\nError : " + err.message);
65 | }
66 | 
--------------------------------------------------------------------------------
/ExtendScript/adjust-target-workarea.jsx:
--------------------------------------------------------------------------------
1 | /*
2 |  +--------------------------------------------------------------------+
3 |  |      ____        __             __                                 |
4 |  |     / __ \____ _/ /_____ ______/ /___ ___  __                      |
5 |  |    / / / / __ `/ __/ __ `/ ___/ / __ `/ / / /                      |
6 |  |   / /_/ / /_/ / /_/ /_/ / /__/ / /_/ / /_/ /                       |
7 |  |  /_____/\__,_/\__/\__,_/\___/_/\__,_/\__, /                        |
8 |  |  Automating Digital Production      /____/                         |
9 |  |                                                                    |
10 |  |                                                                    |
11 |  |  We believe that leveraging data in the design process should      |
12 |  |  be a playful and rewarding art.  Our products make this           |
13 |  |  possible for digital content creators.                            |
14 |  |                                                                    |
15 |  |  |email                 |web           |twitter                    |
16 |  |  |support@dataclay.com  |dataclay.com  |@dataclay                  |
17 |  |                                                                    |
18 |  |  This code is provided to you for your personal or commercial      |
19 |  |  use.  However, you must abide by the terms of the MIT             |
20 |  |  License: https://opensource.org/licenses/MIT                      |
21 |  |                                                                    |
22 |  |                                                                    |
23 |  |  Copyright 2013-2018 Dataclay, LLC                                 |
24 |  |  Licensed under the MIT License                                    |
25 |  |                                                                    |
26 |  +--------------------------------------------------------------------+
27 | 
28 |    This code makes use of the Templater ExtendScript API which is
29 |    documented at the following address:
30 | 
31 |    http://support.dataclay.com/content/how_to/bot/event_scripts/templater_extendscript_api_reference.htm
32 | 
33 |    Follow these steps to use this script:
34 | 
35 |    1. Add two columns or properties to your data source named
36 |       `workarea-start` and `workarea-end`.
37 | 
38 |    2. In your data source, for each job's `workarea-start` value, enter
39 |       the frame number where you want the work area in the target
40 |       composition to begin. Then, for each job's `workarea-end` value,
41 |       enter the frame number where you want the work area in the target
42 |       composition to end.
43 | 
44 |    3. Register this script file with Templater's "After Update" event.
45 | 
46 |    4. Run a batch render job with Templater with rows or objects that
47 |       have different `workarea-start` and `workarea-end` values.
48 |       The output corresponding to each row or object has a different
49 |       work area.
50 | 
51 | */
52 | 
53 | 
54 | var targetComp = $D.target(),
55 |     comp_fps   = targetComp.frameRate,
56 |     f_start    = parseInt($D.job.get("workarea-start")),
57 |     f_end      = parseInt($D.job.get("workarea-end"));
58 | 
59 | targetComp.workAreaStart    = (f_start / comp_fps);
60 | targetComp.workAreaDuration = (f_end / comp_fps) - targetComp.workAreaStart;
--------------------------------------------------------------------------------
/ExtendScript/truncate-long-string.jsx:
--------------------------------------------------------------------------------
1 | /*
2 |  +--------------------------------------------------------------------+
3 |  |      ____        __             __                                 |
4 |  |     / __ \____ _/ /_____ ______/ /___ ___  __                      |
5 |  |    / / / / __ `/ __/ __ `/ ___/ / __ `/ / / /                      |
6 |  |   / /_/ / /_/ / /_/ /_/ / /__/ / /_/ / /_/ /                       |
7 |  |  /_____/\__,_/\__/\__,_/\___/_/\__,_/\__, /                        |
8 |  |  Automating Digital Production      /____/                         |
9 |  |                                                                    |
10 |  |                                                                    |
11 |  |  We believe that leveraging data in the design process should      |
12 |  |  be a playful and rewarding art.  Our products make this           |
13 |  |  possible for digital content creators.                            |
14 |  |                                                                    |
15 |  |  |email                 |web           |twitter                    |
16 |  |  |support@dataclay.com  |dataclay.com  |@dataclay                  |
17 |  |                                                                    |
18 |  |  This code is provided to you for your personal or commercial      |
19 |  |  use.  However, you must abide by the terms of the MIT             |
20 |  |  License: https://opensource.org/licenses/MIT                      |
21 |  |                                                                    |
22 |  |                                                                    |
23 |  |  Copyright 2013-2018 Dataclay, LLC                                 |
24 |  |  Licensed under the MIT License                                    |
25 |  |                                                                    |
26 |  +--------------------------------------------------------------------+
27 | 
28 |    This code makes use of the Templater ExtendScript API documented here:
29 | 
30 |    http://support.dataclay.com/content/how_to/bot/event_scripts/templater_extendscript_api_reference.htm
31 | 
32 |    Follow these steps to use this script:
33 | 
34 |    1. Add a column or property to your data source named `headline`.
35 |       Map the `headline` values to a Text Layer within an After Effects
36 |       composition using the Templater Settings effect.
37 | 
38 |    2. In your data source, for each job's `headline` value, enter a text
39 |       string that contains more than ten characters.
You can enter 40 | strings less than ten characters long, but these will not be 41 | truncated as per the following ExtendScript code. 42 | 43 | 3. Register this script file with the "Before Update" event. 44 | 45 | 4. Using Templater, iterate through a set of rows or objects that have 46 | different `headline` values — some shorter than ten characters, and 47 | some longer. Notice that long text strings within the `headline` 48 | layer are post-fixed with `...`. 49 | 50 | 51 | */ 52 | 53 | function truncate(word){ 54 | 55 | var truncated_word; 56 | var max_characters = 10; 57 | 58 | truncated_word = (word.length > max_characters) ? word.slice(0, max_characters) + '...' : word; 59 | 60 | return truncated_word; 61 | 62 | } 63 | 64 | var current_headline = $D.job.get("headline"); 65 | 66 | $D.log.msg('TRUNCATE', "Now truncating string", current_headline); 67 | $D.job.set("headline", truncate(current_headline)); -------------------------------------------------------------------------------- /NodeJS/deploy-output/api.js: -------------------------------------------------------------------------------- 1 | var log = require('./logger'), 2 | enums = require('./constants'), 3 | stream = require('./stream'), 4 | async = require('async'), 5 | URL = require('url'), 6 | emoji = require('node-emoji'), 7 | path = require('path'), 8 | GoogleSpreadsheet = require('google-spreadsheet'), 9 | Q = require('q'), 10 | config = require('./config'), 11 | pad = require('pad'), 12 | jw = require('./jwplatform'), 13 | vmo = require('./vimeo'), 14 | aws = require('./aws'), 15 | axios = require('axios'); 16 | 17 | var queue = { 18 | 19 | job : {}, 20 | 21 | get_api_domain : () => { 22 | 23 | var endpoint = config.params.data.url, 24 | endpoint_parts = endpoint.split('/'), 25 | endpoint_protocol = endpoint_parts[0], 26 | endpoint_host = endpoint_parts[2], 27 | api_domain = endpoint_protocol + '//' + endpoint_host; 28 | 29 | return api_domain; 30 | 31 | }, 32 | 33 | get_job : (next, complete) => { 34 | 35 | var req = { auth: {} } 36 | 37 | req.auth.username = config.params.user.dclay_user; 38 | req.auth.password = config.params.user.dclay_pass; 39 | 40 | //log.info("\nGET'ing job with `_id` [ %s ] from Dataclay Queue", config.params.data.key); 41 | 42 | axios.get(queue.get_api_domain() + '/jobs/' + config.params.data.key, req) 43 | .then(response => { 44 | queue.job = response.data[0]; 45 | next(queue.job, complete); 46 | }) 47 | .catch(error => { 48 | log.error("Error getting information about the current job."); 49 | }) 50 | 51 | }, 52 | 53 | update_job : (next) => { 54 | 55 | var p = config.params, 56 | req_opts = { auth: {} }, 57 | req = {}; 58 | 59 | req_opts.auth.username = config.params.user.dclay_user; 60 | req_opts.auth.password = config.params.user.dclay_pass; 61 | 62 | req[enums.data.dist] = {}; 63 | 64 | req[enums.data.dist][p.fields.download.name ] = aws.S3_URL.video || 'Unavailable'; 65 | req[enums.data.dist][p.fields.dl_poster.name ] = aws.S3_URL.poster || 'Unavailable'; 66 | req[enums.data.dist][p.fields.dl_preview.name ] = aws.S3_URL.preview || 'Unavailable'; 67 | req[enums.data.dist][p.fields.bcast.name ] = enums.stream.status.CREATED; 68 | req[enums.data.dist][p.fields.stream.name ] = stream.key; 69 | req[enums.data.dist][p.fields.preview.name ] = stream.preview(); 70 | req[enums.data.dist][p.fields.embed.name ] = stream.embed(); 71 | req[enums.data.dist][p.fields.url.name ] = stream.url(); 72 | 73 | log.info("\n\t\t%s\tUpdating job [ %s ] with distribution details" 74 | , 
emoji.get('telephone_receiver')
75 |             , config.params.data.key);
76 | 
77 |         //PATCH existing job object with data
78 |         axios.patch(queue.get_api_domain() + '/jobs/' + config.params.data.key, req, req_opts)
79 |             .then(response => {
80 |                 next();
81 |             })
82 |             .catch(error => {
83 |                 log.error(error);
84 |             });
85 | 
86 |     }
87 | 
88 | }
89 | 
90 | module.exports = queue;
--------------------------------------------------------------------------------
/NodeJS/deploy-output/jwplatform.js:
--------------------------------------------------------------------------------
1 | var log        = require('./logger'),
2 |     enums      = require('./constants'),
3 |     path       = require('path'),
4 |     config     = require('./config'),
5 |     moment     = require('moment'),
6 |     request    = require('request'),
7 |     async      = require('async'),
8 |     jwPlatform = require('jwplatform-api'),
9 |     stream     = require('./stream'),
10 |     fs         = require('fs'),
11 |     emoji      = require('node-emoji'),
12 |     aws        = require('./aws'),
13 |     jwLogger   = null,
14 |     api        = null;
15 | 
16 | jwLogger = {
17 | 
18 |     debug : function(info) { },
19 |     error : function(err) { }
20 | 
21 | }
22 | 
23 | var jw = {
24 | 
25 |     get : function(step) {
26 | 
27 |         api = new jwPlatform({
28 |             key    : config.params.video.key     //enums.jw.auth.key
29 |           , secret : config.params.video.secret  //enums.jw.auth.secret
30 |         }, jwLogger);
31 | 
32 |         log.info("\n\t\tJW Platform ready for asset uploads");
33 | 
34 |         step();
35 | 
36 |     },
37 | 
38 |     video : {
39 | 
40 |         key : null,
41 | 
42 |         asset : null,
43 | 
44 |         upload_obj : null,
45 | 
46 |         sanitize_user_readable : function(token) {
47 | 
48 |             return;  //stub, not yet implemented
49 | 
50 |         },
51 | 
52 |         generate_upload_object : function(step) {
53 | 
54 |             api.getUploadUrl('v1', (err, result) => {
55 |                 jw.video.upload_obj = result;
56 |                 step();
57 |             })
58 | 
59 |         },
60 | 
61 |         create : function(row, step) {
62 | 
63 |             var vid_options = {
64 |                 title        : config.params.video.title
65 |               , description  : config.params.video.desc
66 |               , author       : config.params.user.name
67 |               , expires_date : moment().add(1, 'years').unix()
68 |             }
69 | 
70 |             var after_create = step;
71 | 
72 |             if (config.params.storage.type == enums.storage.types.S3) {
73 | 
74 |                 vid_options.download_url = aws.S3_URL['video'];
75 | 
76 |                 api.post('/v1/videos/create', vid_options, null, function(err, results){
77 | 
78 |                     if (err) {
79 |                         log.error(err);
80 |                     } else {
81 |                         stream.key = results.video.key;
82 |                         step();
83 |                     }
84 | 
85 |                 });
86 | 
87 |             } else if (config.params.storage.type == enums.storage.types.NONE) {
88 | 
89 |                 var form_data = { file : fs.createReadStream(stream.upload) };
90 | 
91 |                 async.series([
92 | 
93 |                     jw.video.generate_upload_object,
94 | 
95 |                     function(step) {
96 | 
97 |                         console.log("\n\t" + emoji.get('clapper') + "\tSending [ " + path.parse(stream.upload).base + " ] to JWPlatform... " + emoji.get('rocket'));
98 | 
99 |                         request.post({uri: jw.video.upload_obj.uploadUrl, formData: form_data}, function(err, resp, body){
100 |                             if (err) return console.error('Upload failed', err);
101 | 
102 |                             //The upload finished, but now we have to set that video's attributes.
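                            //(For context: this branch is a two-step flow.
                            // The rendered file streams to the URL obtained
                            // from getUploadUrl() above, then the `media.key`
                            // parsed from the response below identifies the
                            // video so its vid_options metadata can be set.)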
103 |                             stream.key = JSON.parse(body).media.key;
104 |                             vid_options.video_key = stream.key;
105 |                             api.post('/v1/videos/update', vid_options, null, function(err, results) {
106 |                                 console.log("\n\t" + emoji.get('memo') + "\tFinished updating the video's properties.");
107 |                                 after_create();
108 |                             });
109 | 
110 |                         });
111 | 
112 |                     }
113 | 
114 |                 ], function(err, result) {
115 |                     after_create();
116 |                 })
117 | 
118 |             }
119 | 
120 |         }
121 | 
122 |     }
123 | 
124 | }
125 | 
126 | module.exports = jw;
--------------------------------------------------------------------------------
/NodeJS/package-sequence.js:
--------------------------------------------------------------------------------
1 | /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 | 
3 |    Archive an image sequence generated by Adobe After Effects via Templater
4 |    Copyright (c) Dataclay LLC 2016
5 |    MIT License
6 | 
7 |    You must enter `npm install` to install all dependency modules used in
8 |    this script. All modules are listed in the package.json file in the
9 |    root of this repository.
10 | 
11 |    Enter the following command within the "After each job" field found
12 |    within the Templater Preferences dialog. If using the Templater CLI,
13 |    enter the following command in the "post_cmd_job" property found
14 |    within the templater-options.json file.
15 | 
16 |    node /path/to/event-scripts/NodeJS/package-sequence.js --outdir $out_dir --outname $id --aefile $aep --aedir $aep_dir --repo "/path/to/archive/repository" --extension "zip"
17 | 
18 |  * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
19 | 
20 | //Required NodeJS Modules
21 | var os       = require('os'),
22 |     fs       = require('fs'),
23 |     fse      = require('fs-extra'),
24 |     util     = require('util'),
25 |     path     = require('path'),
26 |     moment   = require('moment'),
27 |     archiver = require('archiver'),
28 |     argv     = require('minimist')(process.argv.slice(2));
29 | 
30 | //Optional for testing:
31 | //Change these paths and strings if you want to test this script on the command line outside of AE
32 | var test_dest    = "/path/to/output/folder",
33 |     test_proj    = "/path/to/project/folder",
34 |     test_outname = "sequence_id",                  //Use a name / id of a given job.
35 |     test_repo    = "/path/to/archive/repository",
36 |     test_ext     = "zip";                          //Use any extension you would like.
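
// For a standalone dry run outside of After Effects, fill in the test_*
// values above and run the script with no arguments, or pass everything
// explicitly; a hypothetical invocation:
//
//    node package-sequence.js --outdir /renders --outname job42 \
//         --aefile /projects/promo.aep --aedir /projects \
//         --repo /archive --extension zip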
37 | 
38 | var msg = '';
39 | msg += "\r\n\r\n";
40 | msg += "\r\n\r\n+-------------------------------------------------------------------+\r\n";
41 | msg += "|                     Packaging Image Sequence                       |\\\r\n";
42 | msg += "+-------------------------------------------------------------------+\\\r\n";
43 | msg += "\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\r\n\r\n";
44 | 
45 | var logfile = "archival.log",
46 |     proj    = path.resolve(argv.aedir || test_proj),
47 |     log     = path.resolve(os.tmpdir(), logfile),
48 |     log_dest, out_root, out_dir, out_name, repo_dir, arch_ext;
49 | 
50 | //Location to write the archived sequence
51 | if (argv.outdir && argv.outname) {
52 |     log_dest = path.resolve(argv.repo, logfile);
53 |     out_root = path.resolve(argv.outdir);
54 |     out_dir  = path.resolve(argv.outdir, argv.outname);
55 |     out_name = argv.outname;
56 |     repo_dir = path.resolve(argv.repo);
57 |     arch_ext = argv.extension;
58 | } else {
59 |     log_dest = path.resolve(test_repo, logfile);
60 |     out_root = path.resolve(test_dest);
61 |     out_dir  = path.resolve(test_dest, test_outname);
62 |     out_name = test_outname;
63 |     repo_dir = path.resolve(test_repo);
64 |     arch_ext = test_ext;
65 | }
66 | 
67 | //Design the output for the post job log
68 | msg += "\r\nArchived on           => " + moment().format('MMMM Do YYYY, h:mm:ss a');
69 | msg += "\r\n\r\nAE Project File       => " + argv.aefile;
70 | msg += "\r\n\r\nAE Project Directory  => " + proj;
71 | msg += "\r\n\r\nTemplater Output Root => " + out_root;
72 | msg += "\r\n\r\nAE Output Name        => " + out_name;
73 | msg += "\r\n\r\nSequence Directory    => " + out_dir;
74 | msg += "\r\n\r\nArchive Extension     => " + arch_ext;
75 | 
76 | console.log(msg);
77 | 
78 | //Use archiver to zip up the entire out_dir
79 | var archive_filename = out_name + "." + arch_ext,
80 |     archive_file     = path.resolve(out_root, archive_filename),
81 |     archive_output   = fs.createWriteStream(archive_file),
82 |     archive          = archiver("zip");
83 | 
84 | archive_output.on('close', function() {
85 | 
86 |     msg += "\r\n\r\nArchive Size => " + archive.pointer();
87 |     msg += "\r\n\r\nArchive Location => " + path.resolve(repo_dir, archive_filename);
88 |     msg += "\r\n\r\nArchiver finalized. The output file descriptor has closed.";
89 | 
90 |     //Append to log and copy log to project directory.
91 |     //NOTE: On Windows, NodeJS cannot append to files that
92 |     //      exist on a mapped network drive. First we
93 |     //      append a local file in the temp directory, then
94 |     //      copy it to the project directory
95 |     try {
96 |         fs.appendFileSync(log, msg, 'utf8');
97 |         fse.copySync(log, log_dest);
98 |     } catch (err) {
99 |         console.error(err.message);
100 |         fs.appendFileSync(log, "\r\nError : " + err.message);
101 |     }
102 | 
103 |     //Copy the archive to its repository
104 |     fse.copySync(archive_file, path.resolve(repo_dir, archive_filename));
105 | 
106 | });
107 | 
108 | archive_output.on('error', function(err) {
109 |     throw err;
110 | });
111 | 
112 | archive.pipe(archive_output);
113 | 
114 | archive.directory(out_dir, "/").finalize();
115 | 
--------------------------------------------------------------------------------
/macOS/event-logger.sh:
--------------------------------------------------------------------------------
1 | # +--------------------------------------------------------------------+
2 | # |      ____        __             __                                 |
3 | # |     / __ \____ _/ /_____ ______/ /___ ___  __                      |
4 | # |    / / / / __ `/ __/ __ `/ ___/ / __ `/ / / /                      |
5 | # |   / /_/ / /_/ / /_/ /_/ / /__/ / /_/ / /_/ /                       |
6 | # |  /_____/\__,_/\__/\__,_/\___/_/\__,_/\__, /                        |
7 | # |  Automating Digital Production      /____/                         |
8 | # |                                                                    |
9 | # |                                                                    |
10 | # |  We believe that leveraging data in the design process should      |
11 | # |  be a playful and rewarding art.  Our products make this           |
12 | # |  possible for digital content creators.                            |
13 | # |                                                                    |
14 | # |  |email                 |web           |twitter                    |
15 | # |  |support@dataclay.com  |dataclay.com  |@dataclay                  |
16 | # |                                                                    |
17 | # |  This code is provided to you for your personal or commercial      |
18 | # |  use.  However, you must abide by the terms of the MIT             |
19 | # |  License: https://opensource.org/licenses/MIT                      |
20 | # |                                                                    |
21 | # |                                                                    |
22 | # |  Copyright 2013-2018 Dataclay, LLC                                 |
23 | # |  Licensed under the MIT License                                    |
24 | # |                                                                    |
25 | # +--------------------------------------------------------------------+
26 | 
27 | # This is a sample bash script for learning how shell scripts can work
28 | # with Templater Bot on macOS.
29 | #
30 | # Follow these steps to use this script:
31 | #
32 | # 1. Open an After Effects project that is mapped to a data source
33 | #    using Templater.
34 | #
35 | # 2. Open the main Templater panel. Open the `Templater Preferences`
36 | #    dialog. Click the `Setup Shell Scripts` button.
37 | #
38 | # 3. Register the following to any or all of the events
39 | #
40 | #    /path/to/event-logger.sh $event $id $aep_dir $log $data_job $aep $out_dir $out_name $data_batch $bot_name $machine_name $user_name $sources $data_start $data_end $out_file $now
41 | #
42 | # 4. Iterate through "Previews" of jobs as stored in Templater's
43 | #    connected data source.
44 | #
45 | # 5. Inspect the `events.log` file that will be created in the same
46 | #    directory as the After Effects project file.
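#
# As a concrete illustration (all values hypothetical), a post-job event
# registered with the argument list from step 3 might invoke this script as:
#
#    /path/to/event-logger.sh bot_post_job 42 "/projects" "/tmp/templater.log" \
#        "/projects/42.json" "/projects/promo.aep" "/renders" "promo_42" ...
#
# The assignments below capture those positional parameters in order; note
# the ${10}-style braces, which bash requires for parameters past $9.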
47 | 48 | 49 | event="$1" 50 | job_id="$2" 51 | aep_loc="$3" 52 | templater_log="$4" 53 | data_job="$5" 54 | aep="$6" 55 | out_dir="$7" 56 | out_name="$8" 57 | data_batch="$9" 58 | bot_name="${10}" 59 | machine="${11}" 60 | user="${12}" 61 | sources="${13}" 62 | data_start="${14}" 63 | data_end="${15}" 64 | out_file="${16}" 65 | now="${17}" 66 | 67 | #The `events.log` file will be in the same directory as the AEP file 68 | log="$aep_loc/events.log" 69 | 70 | if [ $event = "bot_pre_data" ] 71 | then 72 | printf " - $(date) [ TEMPLATER EVENT : $event ] - \n\n" >> "$log" 73 | 74 | printf "\tTemplater Log File => $templater_log\n" >> "$log" 75 | printf "\tMachine Name => $machine\n" >> "$log" 76 | printf "\tUser Name => $user\n" >> "$log" 77 | printf "\tTimestamp => $now\n" >> "$log" 78 | printf "\tSource Location => $sources\n" >> "$log" 79 | printf "\tData Start => $data_start\n" >> "$log" 80 | printf "\tData End => $data_end\n" >> "$log" 81 | printf "\tBot Name => $bot_name\n" >> "$log" 82 | printf "\tAEP File => $aep\n" >> "$log" 83 | printf "\tAEP Location => $aep_loc\n" >> "$log" 84 | printf "\tOutput Location => $out_dir\n" >> "$log" 85 | 86 | 87 | else 88 | 89 | if [ $event = "bot_pre_job" ] || [ $event = "bot_post_job" ] 90 | then 91 | 92 | if [ $event = "bot_pre_job" ] 93 | then 94 | printf "\n" >> "$log" 95 | fi 96 | 97 | printf "\t- $(date) [ TEMPLATER EVENT : $event : JOB ID - $job_id ] - \n\n" >> "$log" 98 | 99 | if [ $event = "bot_post_job" ] 100 | then 101 | printf "" >> "$log" 102 | printf "\tJob Data File => $data_job\n" >> "$log" 103 | printf "\tTemplater Output Name => $out_name\n" >> "$log" 104 | printf "\tTemplater Output File => $out_file\n" >> "$log" 105 | fi 106 | 107 | else 108 | 109 | if [ $event = "bot_pre_layr" ] || [ $event = "bot_post_layr" ] || [ $event = "bot_pre_rndr" ] || [ $event = "bot_post_rndr" ] 110 | then 111 | printf "\t\t- $(date) [ TEMPLATER EVENT : $event ] - \n" >> "$log" 112 | else 113 | printf " - $(date) [ TEMPLATER EVENT : $event ] - \n" >> "$log" 114 | fi 115 | 116 | fi 117 | 118 | fi 119 | 120 | printf "\n" >> "$log" 121 | 122 | if [ $event = "bot_post_batch" ] 123 | then 124 | printf "\tBatch Data File => $data_batch\n" >> "$log" 125 | printf "\n# # #\n\n" >> "$log" 126 | fi 127 | -------------------------------------------------------------------------------- /ExtendScript/font-swapping.jsx: -------------------------------------------------------------------------------- 1 | /* 2 | +--------------------------------------------------------------------+ 3 | | ____ __ __ | 4 | | / __ \____ _/ /_____ ______/ /___ ___ __ | 5 | | / / / / __ `/ __/ __ `/ ___/ / __ `/ / / / | 6 | | / /_/ / /_/ / /_/ /_/ / /__/ / /_/ / /_/ / | 7 | | /_____/\__,_/\__/\__,_/\___/_/\__,_/\__, / | 8 | | Automating Digital Production /____/ | 9 | | | 10 | | | 11 | | We believe that leveraging data in the design process should | 12 | | be a playful and rewarding art. Our products make this | 13 | | possible for digital content creators. | 14 | | | 15 | | |email |web |twitter | 16 | | |support@dataclay.com |dataclay.com |@dataclay | 17 | | | 18 | | This code is provided to you for your personal or commercial | 19 | | use. 
However, you must abide by the terms of the MIT | 20 | | License: https://opensource.org/licenses/MIT | 21 | | | 22 | | | 23 | | Copyright 2013-2019 Dataclay, LLC | 24 | | Licensed under the MIT License | 25 | | | 26 | +--------------------------------------------------------------------+ 27 | 28 | Written by Jon Christoffersen / jon@dataclay.com 29 | Based on a script by Shane Murphy @ prender.co 30 | 31 | This code makes use of the Templater ExtendScript API documented here: 32 | 33 | http://support.dataclay.com/content/how_to/bot/event_scripts/templater_extendscript_api_reference.htm 34 | 35 | Follow these steps to use this script: 36 | 37 | 1. Register this script file on "Before Update" Event in Templater Preferences ExtendScript Setup. 38 | - Font key/column in data source must match the name of a text layer that is tagged with the Templater Settings effect, adding a "--font" tag at the end of the name 39 | -- example a text layer named "text-1" would have a font control column/key in the data source called "text-1--font" 40 | - Font name in data source must be the Postscript Name 41 | 42 | 2. Open an After Effects project that is mapped to a data source using Templater. 43 | 44 | 3. Open the main Templater panel. Iterate through jobs in the data source using Templater's "Preview" feature. 45 | 46 | 4. Inspect the `templater.log` file and note all the messages this code writes to that file. 47 | 48 | */ 49 | 50 | // Logging messages 51 | var textLabel= "FONT CHANGE"; 52 | var scriptErr = "Script error on line "; 53 | var textMsg = "Full list of dynamic text layers"; 54 | var textMsg2 = "Data flagged from source"; 55 | var textMsg3 = "Checking project for layers named..."; 56 | var textMsg4 = "Updating font for selected layers..."; 57 | 58 | // loop over all text layer objects and add names to array 59 | var textLayers = $D.harvest().text; 60 | var i; 61 | var layerNames = []; 62 | 63 | for (i = 0; i < textLayers.length; ++i){ 64 | layerName = textLayers[i].name; 65 | layerNames.push(layerName); 66 | } 67 | 68 | // log text layers to templater.log file 69 | try{ 70 | $D.log.msg(textLabel, textMsg, layerNames); 71 | } catch (err){ 72 | $D.log.msg(textLabel, textMsg, scriptErr + err.line + " : " + err.toString()); 73 | } 74 | 75 | // push data from job.get object that is tagged with "--font" into an array 76 | var taggedData = []; 77 | var dataFromSource = $D.job.get(); 78 | var isLabelFont = "--font"; 79 | 80 | for ( var key in dataFromSource ){ 81 | if (dataFromSource.hasOwnProperty(key)){ 82 | if(key.search(isLabelFont)!=-1){ 83 | taggedData.push(key); 84 | } 85 | } 86 | } 87 | // log keys from data object to an array in templater.log 88 | try{ 89 | $D.log.msg(textLabel, textMsg2, taggedData); 90 | } catch (err){ 91 | $D.log.msg(textLabel, textMsg2, scriptErr + err.line + " : " + err.toString()); 92 | } 93 | 94 | // strip "--font" tag and push into an array 95 | var fontLayers = []; 96 | var flag = /--font/gi; 97 | for (i=0 ; i < taggedData.length; ++i){ 98 | fontLayers.push(taggedData[i].split(flag, 1)); 99 | } 100 | // log stripped tags to templater.log 101 | try{ 102 | $D.log.msg(textLabel, textMsg3, fontLayers); 103 | } catch (err){ 104 | $D.log.msg(textLabel, textMsg3, scriptErr + err.line + " : " + err.toString()); 105 | } 106 | 107 | // swap the fonts if there is a textLayer object that matches the flagged data 108 | for(i=0; i> "%log%" 84 | echo. 
>> "%log%" 85 | echo Templater Log File : %TEMPLATER_LOG% >> "%log%" 86 | echo Machine Name : %MACHINE% >> "%log%" 87 | echo User Name : %USER% >> "%log%" 88 | echo Timestamp : %NOW% >> "%log%" 89 | echo Source Location : %SOURCES% >> "%log%" 90 | echo Data Start : %DATA_START% >> "%log%" 91 | echo Data End : %DATA_END% >> "%log%" 92 | echo Bot Name : %BOT_NAME% >> "%log%" 93 | echo AEP File : %AEP% >> "%log%" 94 | echo AEP Location : %AEP_LOC% >> "%log%" 95 | echo. >> "%log%" 96 | 97 | if "%EVENT%" == "bot_pre_job" ( 98 | 99 | echo Specific to [ %EVENT% ] event: >> "%log%" 100 | echo Job Data File : %DATA_JOB% >> "%log%" 101 | echo Output Asset Name : %OUT_NAME% >> "%log%" 102 | echo Output File Path : %OUT_FILE% >> "%log%" 103 | echo Output Directory : %OUT_DIR% >> "%log%" 104 | echo. >> "%log%" 105 | 106 | ) 107 | 108 | if "%EVENT%" == "bot_post_job" ( 109 | 110 | echo Specific to [ %EVENT% ] event: >> "%log%" 111 | echo Job Data File : %DATA_JOB% >> "%log%" 112 | echo Output Asset Name : %OUT_NAME% >> "%log%" 113 | echo Output File Path : %OUT_FILE% >> "%log%" 114 | echo Output Directory : %OUT_DIR% >> "%log%" 115 | echo. >> "%log%" 116 | 117 | ) 118 | 119 | if "%EVENT%" == "bot_pre_layr" ( 120 | 121 | echo Specific to [ %EVENT% ] event: >> "%log%" 122 | echo NONE >> "%log%" 123 | echo. >> "%log%" 124 | 125 | ) 126 | 127 | if "%EVENT%" == "bot_post_layr" ( 128 | 129 | echo Specific to [ %EVENT% ] event: >> "%log%" 130 | echo NONE >> "%log%" 131 | echo. >> "%log%" 132 | 133 | ) 134 | 135 | if "%EVENT%" == "bot_pre_rndr" ( 136 | 137 | echo Specific to [ %EVENT% ] event: >> "%log%" 138 | echo NONE >> "%log%" 139 | echo. >> "%log%" 140 | 141 | ) 142 | 143 | if "%EVENT%" == "bot_post_rndr" ( 144 | 145 | echo Specific to [ %EVENT% ] event: >> "%log%" 146 | echo NONE >> "%log%" 147 | echo. >> "%log%" 148 | 149 | ) 150 | 151 | if "%EVENT%" == "bot_post_batch" ( 152 | 153 | echo Speicifc to [ %EVENT% ] event: >> "%log%" 154 | echo Batch Data File : "%DATA_BATCH%" 155 | echo. 
>> "%log%" 156 | 157 | ) -------------------------------------------------------------------------------- /NodeJS/deploy-output/stream.js: -------------------------------------------------------------------------------- 1 | var log = require('./logger'), 2 | enums = require('./constants'), 3 | config = require('./config'), 4 | sprintf = require('sprintf-js').sprintf; 5 | 6 | var stream = { 7 | 8 | upload : null 9 | , thumb : null 10 | , clip : null 11 | , key : null 12 | , preview : null 13 | , embed_code : null 14 | , source_asset : null 15 | 16 | , info : { 17 | 18 | } 19 | 20 | , embed : function(row) { 21 | 22 | var p = config.params, 23 | service = p.video.service, 24 | formula = null; 25 | 26 | if (config.is_batch()) { 27 | 28 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) { 29 | 30 | switch (service) { 31 | case enums.video.services.VIMEO : formula = stream.embed_code; 32 | break; 33 | case enums.video.services.JWPLATFORM : formula = '=CONCATENATE("")'; 34 | break; 35 | case enums.video.services.YOUTUBE : formula = null 36 | break; 37 | } 38 | 39 | } else { 40 | 41 | switch (service) { 42 | case enums.video.services.VIMEO : formula = stream.embed_code; 43 | break; 44 | case enums.video.services.JWPLATFORM : formula = ""; 45 | break; 46 | case enums.video.services.YOUTUBE : formula = null 47 | break; 48 | } 49 | 50 | } 51 | 52 | } else { 53 | 54 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) { 55 | 56 | switch (service) { 57 | case enums.video.services.VIMEO : formula = stream.embed_code; 58 | break; 59 | case enums.video.services.JWPLATFORM : formula = '=CONCATENATE("")'; 60 | break; 61 | case enums.video.services.YOUTUBE : formula = null 62 | break; 63 | } 64 | 65 | } else { 66 | 67 | switch (service) { 68 | case enums.video.services.VIMEO : formula = stream.embed_code; 69 | break; 70 | case enums.video.services.JWPLATFORM : formula = ""; 71 | break; 72 | case enums.video.services.YOUTUBE : formula = null 73 | break; 74 | } 75 | 76 | } 77 | 78 | } 79 | 80 | return formula; 81 | 82 | } 83 | 84 | , preview : function(row) { 85 | 86 | var p = config.params, 87 | service = p.video.service, 88 | formula = null; 89 | 90 | if (config.is_batch()) { 91 | 92 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) { 93 | 94 | switch (service) { 95 | case enums.video.services.VIMEO : formula = '=CONCATENATE("https://",' + p.video.preview.domain + ', "/",' + p.fields.stream.letter + row.row_idx + ')'; 96 | break; 97 | case enums.video.services.JWPLATFORM : formula = '=CONCATENATE("http://",' + p.video.preview.domain + ', "#",' + p.video.preview.route + ', "=", ' + p.fields.stream.letter + row.row_idx + ')'; 98 | break; 99 | case enums.video.services.YOUTUBE : formula = null 100 | break; 101 | } 102 | 103 | } else { 104 | 105 | switch (service) { 106 | case enums.video.services.VIMEO : formula = "https://" + enums.vimeo.playback.DOMAIN + "/" + stream.key; 107 | break; 108 | case enums.video.services.JWPLATFORM : formula = "https://" + enums.jw.playback.PREVIEW + stream.key + "-" + p.video.player_key; 109 | break; 110 | case enums.video.services.YOUTUBE : formula = null 111 | break; 112 | } 113 | 114 | } 115 | 116 | } else { 117 | 118 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) { 119 | 120 | switch (service) { 121 | case enums.video.services.VIMEO : formula = '=CONCATENATE("https://",' + p.video.preview.domain + ', "/", "' + stream.key + '")'; 122 | break; 123 | case enums.video.services.JWPLATFORM : 
formula = '=CONCATENATE("http://",' + p.video.preview.domain + ', "#",' + p.video.preview.route + ', "=", "' + stream.key + '")'; 124 | break; 125 | case enums.video.services.YOUTUBE : formula = null; 126 | break; 127 | } 128 | 129 | } else { 130 | 131 | switch (service) { 132 | case enums.video.services.VIMEO : formula = "https://" + enums.vimeo.playback.DOMAIN + "/" + stream.key; 133 | break; 134 | case enums.video.services.JWPLATFORM : formula = "https://" + enums.jw.playback.PREVIEW + stream.key + "-" + p.video.player_key; 135 | break; 136 | case enums.video.services.YOUTUBE : formula = null; 137 | break; 138 | 139 | } 140 | 141 | } 142 | 143 | } 144 | 145 | return formula; 146 | 147 | } 148 | 149 | , url : function(row) { 150 | 151 | var p = config.params; 152 | 153 | return sprintf(p.video.stream_url, stream.key); 154 | 155 | } 156 | } 157 | 158 | module.exports = stream; -------------------------------------------------------------------------------- /NodeJS/deploy-output/constants.js: -------------------------------------------------------------------------------- 1 | module.exports = Object.freeze({ 2 | 3 | defaults : { 4 | 5 | VIDEO_TITLE : "Templater Video" 6 | , STORAGE_FOLDER : "Uncategorized" 7 | 8 | }, 9 | 10 | jw : { 11 | 12 | status : { 13 | CREATED : "created" 14 | , PROCESSING : "processing" 15 | , UPDATING : "updating" 16 | , READY : "ready" 17 | , FAILED : "failed" 18 | } 19 | 20 | , playback : { 21 | PREVIEW : "cdn.jwplayer.com/previews/" 22 | , EMBED_CDN : "cdn.jwplayer.com/players/" 23 | } 24 | 25 | }, 26 | 27 | vimeo : { 28 | 29 | playback : { 30 | DOMAIN : "vimeo.com" 31 | } 32 | 33 | }, 34 | 35 | video : { 36 | 37 | services : { 38 | YOUTUBE : "YouTube" 39 | , JWPLATFORM : "JW Platform" 40 | , VIMEO : "Vimeo" 41 | } 42 | 43 | , status : { 44 | READY : "ready" 45 | , QUEUED : "queued" 46 | , PROCESSING : "processing" 47 | , DONE : "done" 48 | , FAIL : "fail" 49 | } 50 | 51 | }, 52 | 53 | stream : { 54 | 55 | status : { 56 | CREATED : "created" 57 | } 58 | 59 | }, 60 | 61 | aws : { 62 | 63 | signatureVersion : 'v4' 64 | , s3_default_url : 's3.amazonaws.com' 65 | , regions : { 66 | "us-east-1" : { name: "US East", location: "N. Virgina", endpoint: "s3.us-east-1.amazonaws.com" } 67 | , "us-east-2" : { name: "US East", location: "Ohio", endpoint: "s3.us-east-2.amazonaws.com" } 68 | , "us-west-1" : { name: "US West", location: "N. 
California", endpoint: "s3.us-west-1.amazonaws.com" } 69 | , "us-west-2" : { name: "US West", location: "Oregon", endpoint: "s3.us-west-2.amazonaws.com" } 70 | , "ca-central-1" : { name: "Canada", location: "Central", endpoint: "s3.ca-central-1.amazonaws.com" } 71 | , "ap-south-1" : { name: "Asia Pacific", location: "Mumbai", endpoint: "s3.ap-south-1.amazonaws.com" } 72 | , "ap-northeast-1" : { name: "Asia Pacific", location: "Tokyo", endpoint: "s3.ap-northeast-1.amazonaws.com" } 73 | , "ap-northeast-2" : { name: "Asia Pacific", location: "Seoul", endpoint: "s3.ap-northeast-2.amazonaws.com" } 74 | , "ap-northeast-3" : { name: "Asia Pacific", location: "Osaka", endpoint: "s3.ap-northeast-3.amazonaws.com" } 75 | , "ap-southeast-1" : { name: "Asia Pacific", location: "Singapore", endpoint: "s3.ap-southeast-1.amazonaws.com" } 76 | , "ap-southeast-2" : { name: "Asia Pacific", location: "Sydney", endpoint: "s3.ap-southeast-2.amazonaws.com" } 77 | , "cn-northwest-1" : { name: "China", location: "Ningxia", endpoint: "s3.cn-northwest-1.amazonaws.com.cn" } 78 | , "cn-north-1" : { name: "China", location: "Beijing", endpoint: "s3.cn-north-1.amazonaws.com.cn" } 79 | , "eu-central-1" : { name: "EU Central", location: "Frankfurt", endpoint: "s3.eu-central-1.amazonaws.com" } 80 | , "eu-west-1" : { name: "EU West", location: "Ireland", endpoint: "s3.eu-west-1.amazonaws.com" } 81 | , "eu-west-2" : { name: "EU West", location: "London", endpoint: "s3.eu-west-2.amazonaws.com" } 82 | , "eu-west-3" : { name: "EU West", location: "Paris", endpoint: "s3.eu-west-3.amazonaws.com" } 83 | , "sa-east-1" : { name: "South America", location: "São Paulo", endpoint: "s3.sa-east-1.amazonaws.com" } 84 | } 85 | 86 | }, 87 | 88 | data : { 89 | 90 | dist : "_distribution" 91 | 92 | , types : { 93 | 94 | GOOGLE : "google" 95 | , JSON_FILE : "json_file" 96 | , JSON_URL : "json_url" 97 | 98 | } 99 | 100 | , fields : { 101 | 102 | OUTPUT : "output" 103 | , S3_LINK : "s3-link-video" 104 | , S3_POSTER : "s3-link-poster" 105 | , S3_PREVIEW : "s3-link-preview" 106 | , STREAM : "stream-key" 107 | , BCAST : "broadcast-status" 108 | , EMBED : "embed-script" 109 | , PREV : "broadcast-preview" 110 | , URL : "stream-url" 111 | 112 | } 113 | 114 | , tokens : { 115 | 116 | GSHEET_DOMAIN : "docs.google.com" 117 | 118 | 119 | } 120 | 121 | }, 122 | 123 | storage : { 124 | 125 | types : { 126 | 127 | NONE : null 128 | , S3 : "S3" 129 | , DROPBOX : "Dropbox" 130 | , GDRIVE : "GDrive" 131 | 132 | } 133 | 134 | }, 135 | 136 | errors : { 137 | 138 | absent_gcreds_file : "No Google Services Account credential file found at: %s. You may need to create a service account in the Google API console. Please see this article for more information about Google Service accounts: https://cloud.google.com/iam/docs/understanding-service-accounts" 139 | , absent_awscreds_file : "No AWS credential file found at: %s. You need to store your AWS IAM Access Key ID, and Secret Access Key in a file to use this app. Please see this article retreiving your IAM keys from AWS: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_access-keys.html" 140 | , absent_jwcreds_file : "No JWPlatform credential file found at: %s. You need to store your JW Platform user name, API key, and key secret in a file to use this app. Refer to your account page on JWPlatform." 141 | , absent_ytcreds_file : "No YouTube credential file found at: %s. You need to download your credential file from the API Console for Google Cloud Platform." 
142 | , absent_vmocreds_file : "No Vimeo credential file found at: %s. You need to create your Vimeo credential file based on authentication data supplied by Vimeo." 143 | , absent_stream_service : "It appears no video streaming service was selected. Please specify one using the --stream_service argument and try again." 144 | , incorrect_vmo_state : "There was a problem authorizing this application to use your Vimeo account. Please try again." 145 | , json_read_err : "There was an error reading the JSON from file %s. It may be malformed, please inspect it and try again." 146 | , absent_collection : "There was no data collection, or worksheet, found. Please use the --worksheet argument to specify which collection of data you want project to read and write to." 147 | 148 | } 149 | 150 | }); -------------------------------------------------------------------------------- /ExtendScript/event-logger.jsx: -------------------------------------------------------------------------------- 1 | /* 2 | +--------------------------------------------------------------------+ 3 | | ____ __ __ | 4 | | / __ \____ _/ /_____ ______/ /___ ___ __ | 5 | | / / / / __ `/ __/ __ `/ ___/ / __ `/ / / / | 6 | | / /_/ / /_/ / /_/ /_/ / /__/ / /_/ / /_/ / | 7 | | /_____/\__,_/\__/\__,_/\___/_/\__,_/\__, / | 8 | | Automating Digital Production /____/ | 9 | | | 10 | | | 11 | | We believe that leveraging data in the design process should | 12 | | be a playful and rewarding art. Our products make this | 13 | | possible for digital content creators. | 14 | | | 15 | | |email |web |twitter | 16 | | |support@dataclay.com |dataclay.com |@dataclay | 17 | | | 18 | | This code is provided to you for your personal or commercial | 19 | | use. However, you must abide by the terms of the MIT | 20 | | License: https://opensource.org/licenses/MIT | 21 | | | 22 | | | 23 | | Copyright 2013-2018 Dataclay, LLC | 24 | | Licensed under the MIT License | 25 | | | 26 | +--------------------------------------------------------------------+ 27 | 28 | This code makes use of the Templater ExtendScript API documented here: 29 | 30 | http://support.dataclay.com/content/how_to/bot/event_scripts/templater_extendscript_api_reference.htm 31 | 32 | Follow these steps to use this script: 33 | 34 | 1. Register this script file to any Templater event, or all events. 35 | 36 | 2. Open an After Effects project that is mapped to a data source 37 | using Templater. 38 | 39 | 3. Open the main Templater panel. Iterate through jobs in the data 40 | source using Templater's "Preview" feature. 41 | 42 | 4. Inspect the `templater.log` file and note all the messages this 43 | code writes to that file. 
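   As an orientation before reading the handlers below: every log
   entry in this script goes through $D.log.msg(), which takes a
   label, a short human-readable message, and an optional payload
   that is written to `templater.log`. A sketch of the pattern (the
   label, message, and payload here are illustrative, not required
   values):

       $D.log.msg('JSX SCRIPT', "Some message", some_payload);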
44 | 
45 | */
46 | 
47 | var message, harvest, dl, dynamic_layer_names,
48 |     footage = {},
49 |     job_props = {};
50 | 
51 | //Log what event was just broadcast
52 | $D.log.msg('JSX SCRIPT', "))) EVENT BROADCAST (((", $D.event());
53 | 
54 | if ($D.event() === 'bot_pre_data' ) { //Handling pre-data event
55 | 
56 |     $D.log.msg('JSX SCRIPT', "Re-versioning After Effects Project File with [ " + $D.task() + " ] task", File(app.project.file).fsName);
57 | 
58 | } else if ($D.event() === 'bot_post_data') { //Handling post-data event
59 | 
60 |     message = "Retrieved data from position " + $D.range().start + " to " + $D.range().end;
61 |     $D.log.msg('JSX SCRIPT', message, $D.batch.get());
62 | 
63 |     $D.log.msg('JSX SCRIPT', "Listing all the `title` values in this batch", $D.batch.get('title'));
64 | 
65 | } else if ($D.event() === 'bot_pre_job') { //Handling pre-job event
66 | 
67 |     message = "Proceeding to re-version AEP for job [ " + $D.job.get('id') + " ]";
68 |     $D.log.msg('JSX SCRIPT', message);
69 | 
70 | } else if ($D.event() === 'bot_pre_dl') { //Handling pre-dl event
71 | 
72 |     dl = $D.download();
73 |     message = "Proceeding to download remote footage";
74 |     $D.log.msg('JSX SCRIPT', message, dl.to_console(true));
75 | 
76 | } else if ($D.event() === 'bot_pre_ftg') { //Handling pre-ftg event
77 | 
78 |     footage.layer = $D.footage.layer() || null;
79 |     footage.source = $D.footage.source() || null;
80 |     footage.item = $D.footage.item() || null;
81 | 
82 |     message = "Proceeding to process footage";
83 |     if (footage.layer) {
84 |         $D.log.msg('JSX SCRIPT', "Layer footage mapped to", footage.layer.name);
85 |         $D.log.msg('JSX SCRIPT', "Layer source", footage.layer.source.file.fsName);
86 |     }
87 | 
88 |     if (footage.source) {
89 |         $D.log.msg('JSX SCRIPT', "Path to footage source", footage.source);
90 |     }
91 | 
92 |     if (footage.item) {
93 |         $D.log.msg('JSX SCRIPT', "Footage item ID in AE project", footage.item.id);
94 |     }
95 | 
96 | 
97 | } else if ($D.event() === 'bot_pre_layr') { //Handling pre-update event
98 | 
99 |     harvest = $D.harvest();
100 |     dynamic_layer_names = [];
101 | 
102 |     message = "Proceeding to update following layers for job [ " + $D.job.get('id') + " ]";
103 | 
104 |     for (var layer_type in harvest) {
105 | 
106 |         for (var i=0; i < harvest[layer_type].length; i++) {
107 |             if (harvest[layer_type][i]) dynamic_layer_names.push((harvest[layer_type][i]).name);
108 |         }
109 | 
110 |     }
111 | 
112 |     $D.log.msg('JSX SCRIPT', message, dynamic_layer_names);
113 | 
114 | 
115 | } else if ($D.event() === 'bot_post_layr') { //Handling post update event
116 | 
117 |     $D.log.msg('JSX SCRIPT', "Finished updating layers");
118 | 
119 | } else if ($D.event() === 'bot_pre_rndr') { //Handling pre output events
120 | 
121 |     $D.log.msg('JSX SCRIPT', "Creating output for job [ " + $D.job.get('id') + " ]");
122 | 
123 | } else if ($D.event() === 'bot_post_rndr') {
124 | 
125 |     $D.log.msg('JSX SCRIPT', "Finished creating output");
126 | 
127 |     job_props.aep_loc = Folder(app.project.file.parent).fsName;
128 |     job_props.aep = File(app.project.file).fsName;
129 |     job_props.log = $D.log.file().fsName;
130 |     job_props.data_job = $D.job.file().fsName;
131 |     job_props.output_loc = $D.output().loc.fsName;
132 |     job_props.output_name = $D.output().name;
133 |     job_props.bot_name = $D.bot_id();
134 |     job_props.module = $D.output().module;
135 |     job_props.template = $D.output().template;
136 | 
137 |     $D.log.msg('JSX SCRIPT', "Details for most recently processed job", job_props.toSource());
138 | 
139 | } else if ($D.event() === 'bot_post_job') {
140 | 
141 |     message =
"Re-versioning job [ " + $D.job.get('id') + " ] is now complete! File containing job information" 142 | $D.log.msg('JSX SCRIPT', message, $D.job.file().fsName); 143 | 144 | } else if ($D.event() === 'bot_post_dl') { 145 | 146 | message = "After Footage Download Event!" 147 | dl = $D.download(); 148 | $D.log.msg('JSX SCRIPT', message, dl.to_console(true)); 149 | 150 | } else if ($D.event() === 'bot_post_ftg') { 151 | 152 | message = "After Footage Processing Event!" 153 | footage.layer = $D.footage.layer() || null; 154 | footage.source = $D.footage.source() || null; 155 | footage.item = $D.footage.item() || null; 156 | 157 | message = "Proceeding to process footage"; 158 | if (footage.layer) { 159 | $D.log.msg('JSX SCRIPT', "Layer footage mapped to", footage.layer.name); 160 | $D.log.msg('JSX SCRIPT', "Layer source", footage.layer.source.file.fsName); 161 | } 162 | 163 | if (footage.source) { 164 | $D.log.msg('JSX SCRIPT', "Path to footage source", footage.source); 165 | } 166 | 167 | if (footage.item) { 168 | $D.log.msg('JSX SCRIPT', "Footage item ID in AE project", footage.item.id); 169 | } 170 | 171 | } else if ($D.event() === 'bot_post_batch') { 172 | 173 | $D.log.msg('JSX SCRIPT', "File containing batch data", $D.batch.file().fsName); 174 | 175 | } else if ($D.event() === 'bot_on_enable') { 176 | 177 | $D.log.msg('JSX SCRIPT', "The bot was just enabled. Waiting to process data."); 178 | 179 | } else if ($D.event() === 'bot_on_shutdown') { 180 | 181 | $D.log.msg('JSX SCRIPT', "The bot was just shut down. No longer waiting for data."); 182 | 183 | } 184 | -------------------------------------------------------------------------------- /NodeJS/deploy-output/app.js: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | +--------------------------------------------------------------------+ 4 | | ____ __ __ | 5 | | / __ \____ _/ /_____ ______/ /___ ___ __ | 6 | | / / / / __ `/ __/ __ `/ ___/ / __ `/ / / / | 7 | | / /_/ / /_/ / /_/ /_/ / /__/ / /_/ / /_/ / | 8 | | /_____/\__,_/\__/\__,_/\___/_/\__,_/\__, / | 9 | | Automating Digital Production /____/ | 10 | | | 11 | | | 12 | | We believe that leveraging data in the design process should | 13 | | be a playful and rewarding art. Our products make this | 14 | | possible for digital content creators. | 15 | | | 16 | | |email |web |twitter | 17 | | |support@dataclay.com |dataclay.com |@dataclay | 18 | | | 19 | | This code is provided to you for your personal or commercial | 20 | | use. However, you must abide by the terms of the MIT | 21 | | License: https://opensource.org/licenses/MIT | 22 | | | 23 | | | 24 | | Copyright 2013-2018 Dataclay, LLC | 25 | | Licensed under the MIT License | 26 | | | 27 | +--------------------------------------------------------------------+ 28 | 29 | |||||||| Description 30 | 31 | This application references a data source and uploads output assets 32 | generated by Templater to a cloud-based storage service, then stages 33 | that uploaded asset to a streaming provider and populates the original 34 | data source with a URL to share or embed. 35 | 36 | Please be sure to review the README.md file in this folder to 37 | understand how to use this script. 
38 | 39 | */ 40 | 41 | var log = require('./logger'), 42 | enums = require('./constants'), 43 | async = require('async'), 44 | fs = require('fs'), 45 | pth = require('path'), 46 | nuuid = require('node-uuid'), 47 | nopen = require('open'), 48 | nurl = require('url'), 49 | nutil = require('util'), 50 | Q = require('q'), 51 | moment = require('moment'), 52 | emoji = require('node-emoji'), 53 | config = require('./config'), 54 | jw = require('./jwplatform'), 55 | gsheet = require('./gsheet'), 56 | dcQ = require('./api'), 57 | aws = require('./aws'), 58 | yt = require('./youtube'), 59 | vmo = require('./vimeo'), 60 | deploy = require('./deploy'), 61 | pad = require('pad'), 62 | argv = require('minimist')(process.argv.slice(2)); 63 | 64 | log.info("\n\n------- Deploying output on [ " + moment().format('MMMM Do YYYY, h:mm:ss A') + " ] ----------------") 65 | 66 | try { 67 | 68 | async.series([ 69 | 70 | function(step){ 71 | 72 | var conf = { 73 | gcreds : argv.gcreds_file 74 | , jwcreds : argv.jwcreds_file 75 | , awscreds : argv.awscreds_file 76 | , ytcreds : argv.ytcreds_file 77 | , vmocreds : argv.vmocreds_file 78 | , stream_service : argv.stream_service 79 | , stream_authorize : argv.stream_authorize 80 | , stream_group : argv.stream_group 81 | , stream_privacy : argv.stream_privacy 82 | , stream_comments : argv.stream_comments 83 | , stream_download : argv.stream_download 84 | , stream_overwrite : argv.stream_overwrite 85 | , data_type : argv.data_type || enums.data.types.GOOGLE 86 | , user : argv.author 87 | , data_collection : argv.worksheet 88 | , sheet_key : argv.sheet_key 89 | , data_uri : argv.data_uri 90 | , dclay_user : argv.dclay_user 91 | , dclay_pass : argv.dclay_pass 92 | , data_index : argv.data_index 93 | , data_key : argv.data_key 94 | , start_row : argv.start_row 95 | , end_row : argv.end_row 96 | , asset_loc : argv.asset_loc 97 | , poster_frame : argv.poster_frame 98 | , poster_archive : argv.poster_archive 99 | , poster_ext : argv.poster_ext || "png" 100 | , skip_clip_archive : ((argv.skip_clip_archive ? 
JSON.parse(argv.skip_clip_archive) : null) || false ) 101 | , asset_name : argv.asset_name || null 102 | , asset_ext : argv.asset_ext 103 | , preview_info : { domain : argv.domain_cell, route : argv.route_cell, player_key : argv.player_cell } 104 | , player_key : argv.player_key || null 105 | , storage_type : (argv.storage_service || enums.storage.types.NONE) 106 | , storage_region : argv.s3_region 107 | , storage_bucket : argv.s3_bucket 108 | , storage_folder : argv.s3_folder 109 | , broadcast : argv.broadcast 110 | , title : argv.title 111 | , desc : argv.desc || "null" 112 | , bot_enabled : argv.bot_enabled 113 | , stream_url : argv.stream_url 114 | }; 115 | 116 | config.get(conf); 117 | config.display(); 118 | step(); 119 | }, 120 | 121 | config.read_prefs, 122 | 123 | function choose_stream_service(step) { 124 | 125 | log.info("\n\t[ STREAMING SERVICE ]"); 126 | 127 | if (config.params.video.service == enums.video.services.JWPLATFORM) { 128 | jw.get(step) 129 | } else if (config.params.video.service == enums.video.services.YOUTUBE) { 130 | yt.get(step) 131 | } else if (config.params.video.service == enums.video.services.VIMEO) { 132 | vmo.get(step) 133 | } else { 134 | log.info("\n\t\tNo video streaming service selected."); 135 | step(); 136 | } 137 | 138 | }, 139 | 140 | function setup_storage(step) { 141 | 142 | log.info("\n\t[ STORAGE SERVICE ]"); 143 | 144 | if (config.params.storage.type === enums.storage.types.S3) 145 | { 146 | aws.config(step); 147 | } else { 148 | log.info("\n\t\tNo storage service selected."); 149 | step(); 150 | } 151 | 152 | }, 153 | 154 | function get_job(step) { 155 | 156 | if (!config.params.user.dclay_user) { 157 | gsheet.get(step) 158 | } else { 159 | step(); //Dataclay Queue does not need to open the data. 160 | } 161 | 162 | }, 163 | 164 | function process_video(step) { 165 | 166 | var p = config.params, 167 | sheet_query = null, 168 | sql = null; 169 | 170 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) { //Google Sheet Datasource 171 | 172 | //Processing discontiguously (Templater Bot) 173 | if (!config.is_batch()) 174 | { 175 | 176 | sql = p.fields.index + '=' + p.data.key; 177 | 178 | sheet_query = { 179 | 'offset' : 1 180 | , 'limit' : 1 181 | , 'query' : sql 182 | }; 183 | 184 | //Processed contiguously (Batch process) 185 | } else { 186 | 187 | deploy.is_batch = true; 188 | 189 | sheet_query = { 190 | offset : (p.batch.start-1), 191 | limit : ((p.batch.end) - (p.batch.start))+1, 192 | orderby : p.fields.index 193 | } 194 | 195 | } 196 | 197 | //Retrieve the rows needed to process 198 | gsheet.worksheet.getRows(sheet_query, function( err, rows ){ 199 | 200 | if (err) { 201 | log.error("\n\t\tThere was an error:\n\t\t\t%s\n\t\t\tUsing sheet query %j", err, sheet_query); 202 | throw err; 203 | } 204 | 205 | if (config.is_batch()) { 206 | deploy.batch(rows, step); 207 | } else { 208 | deploy.single(rows[0], step); 209 | } 210 | 211 | }); 212 | 213 | } else { //API Data Source 214 | 215 | dcQ.get_job(deploy.single, step); 216 | 217 | } 218 | 219 | } 220 | 221 | ], function(err) { 222 | log.info("\n\t[ EXIT ]"); 223 | }); //END MAIN APP ENTRY 224 | 225 | } catch (err) { 226 | 227 | log.error(err.message); 228 | 229 | } 230 | -------------------------------------------------------------------------------- /NodeJS/deploy-output/deploy.js: -------------------------------------------------------------------------------- 1 | var async = require('async'), 2 | fs = require('fs'), 3 | aws = require('./aws'), 4 | dcQ = 
require('./api'), 5 | vmo = require('./vimeo'), 6 | jw = require('./jwplatform'), 7 | enums = require('./constants'), 8 | emoji = require('node-emoji'), 9 | config = require('./config'), 10 | gsheet = require('./gsheet'), 11 | stream = require('./stream'), 12 | path = require('path'), 13 | log = require('./logger'); 14 | 15 | var deploy = { 16 | 17 | is_batch : false, 18 | 19 | video_file : null, 20 | 21 | poster_file : null, 22 | 23 | batch : function(rows, step) { 24 | 25 | var p = config.params, 26 | row_count = (p.batch.start-1); 27 | 28 | log.info("\n\t[ DEPLOYMENT %s ]" 29 | , emoji.get('airplane_departure')); 30 | 31 | log.info("\n\t\t%s\tDeploying [ %s ] videos to [ %s ] ..." 32 | , emoji.get('airplane_departure') 33 | , rows.length 34 | , p.video.service) 35 | 36 | async.eachOfSeries(rows, 37 | 38 | function(row, key, step) { 39 | 40 | row_count++; 41 | row.row_idx = row_count; 42 | gsheet.row = row; 43 | 44 | log.info("\n\n\t\t--------------------------------------------------\n\n\t\t%s\tOpened Row [ %s ] in Worksheet [ %s ]" 45 | , emoji.get('eyes') 46 | , row.row_idx 47 | , gsheet.worksheet.title); 48 | 49 | deploy.video_file = path.resolve(p.batch.assets, (row[p.fields.output.name] + "." + p.video.ext)); 50 | stream.upload = deploy.video_file; 51 | 52 | if (!p.video.overwrite && row[enums.data.fields.STREAM]) { 53 | 54 | log.info("\n\t\t%s\tRow [ %s ] already has a stream key. Skipping." 55 | , emoji.get('ok_hand') 56 | , row.row_idx); 57 | 58 | step(); 59 | 60 | } else { 61 | 62 | log.info("\n\t\t%s\tSearching for [ %s ] ..." 63 | , emoji.get('mag_right') 64 | , (row[p.fields.output.name] + "." + p.video.ext)); 65 | 66 | async.series([ 67 | 68 | //Transport file to storage provider 69 | function(step) { 70 | 71 | let video = { 72 | 73 | file : video_file 74 | , step : step 75 | 76 | } 77 | 78 | deploy.archive_asset(video); 79 | 80 | }, 81 | 82 | function(step) { 83 | gsheet.store_url(step) 84 | }, 85 | 86 | function(step) { 87 | gsheet.store_stream_key(step) 88 | }, 89 | 90 | function(step) { 91 | gsheet.store_embed_script(step) 92 | }, 93 | 94 | function(step) { 95 | gsheet.store_bcast_preview(step) 96 | }, 97 | 98 | function(step) { 99 | gsheet.store_stream_url(step) 100 | } 101 | 102 | ], function(){ 103 | 104 | log.info("\n\t\t%s\tWrote to row [ %s ] in worksheet [ %s ]" 105 | , emoji.get('pencil2') 106 | , row.row_idx 107 | , gsheet.worksheet.title); 108 | 109 | step(); 110 | 111 | }); 112 | 113 | } 114 | 115 | }, 116 | 117 | function(err){ 118 | 119 | if (!err) { 120 | log.info("\n\t[ COMPLETE ]\n\n\t\t%s\tDone processing [ %s ] rows" 121 | , emoji.get('thumbsup') 122 | , rows.length); 123 | } else { 124 | log.error("\n\n\t%sThere was an error during batch upload:\n\t\t%s" 125 | , emoji.get('x') 126 | , err.message) 127 | } 128 | 129 | step(); 130 | 131 | }); 132 | 133 | }, 134 | 135 | single : function(row, step) { 136 | 137 | log.info("\n\t[ DEPLOYMENT %s ]" 138 | , emoji.get('airplane_departure')); 139 | 140 | var p = config.params; 141 | 142 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) 143 | { 144 | gsheet.row = row; 145 | 146 | log.info("\n\t\t%s\tOpened row with [ %s ] key of [ %s ] in Worksheet [ %s ]" 147 | , emoji.get('eyes') 148 | , p.fields.index 149 | , row[p.fields.index] 150 | , gsheet.worksheet.title); 151 | } else { 152 | 153 | log.info("\n\t\t%s\tDeploying assets created from job with key [ %s ]" 154 | , emoji.get('eyes') 155 | , dcQ.job._id); 156 | 157 | } 158 | 159 | 160 | async.series([ 161 | 162 | function(step) { 163 | 
164 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) { 165 | deploy.video_file = path.resolve(p.batch.assets, config.sanitize(row[p.fields.output.name]) + '.' + p.video.ext ); 166 | deploy.poster_file = path.resolve(p.batch.assets, config.sanitize(row[p.fields.output.name]) + '.' + p.video.thumb_ext ); 167 | deploy.preview_file = path.resolve(p.batch.assets, config.sanitize(row[p.fields.output.name]) + '.' + 'gif' ); 168 | } else { 169 | deploy.video_file = path.resolve(p.batch.assets, config.sanitize(p.video.asset_name || row["_id"]) + '.' + p.video.ext ); 170 | deploy.poster_file = path.resolve(p.batch.assets, config.sanitize(p.video.asset_name || row["_id"]) + '.' + p.video.thumb_ext ); 171 | deploy.preview_file = path.resolve(p.batch.assets, config.sanitize(p.video.asset_name ||row["_id"]) + '.' + 'gif' ); 172 | } 173 | 174 | 175 | stream.upload = deploy.video_file; 176 | stream.thumb = deploy.poster_file; 177 | 178 | stream.clip = deploy.preview_file; 179 | 180 | if (!p.video.overwrite && row[p.fields.stream.name]) { 181 | 182 | log.info("\n\t\t%s\tRow with key [ %s ] already has a stream key. Skipping." 183 | , emoji.get('ok_hand') 184 | , row[p.fields.index]); 185 | step(); 186 | 187 | } else { 188 | 189 | async.series([ 190 | 191 | //Transport video asset file to storage provider 192 | (step) => deploy.archive_asset({ 193 | name: 'video' 194 | , file: deploy.video_file 195 | , step: step}), 196 | 197 | //Transport poster to storage provider 198 | (step) => deploy.archive_asset({ 199 | name: 'poster' 200 | , file: deploy.poster_file 201 | , step: step}), 202 | 203 | (step) => { 204 | 205 | if (p.video.skip_clip_archive) { 206 | 207 | log.info("\n\t\t%s\tSkipping archival of gif clip." 208 | , emoji.get("package")); 209 | step(); 210 | 211 | } else { 212 | 213 | deploy.archive_asset({ 214 | name: 'preview' 215 | , file: deploy.preview_file 216 | , step: step}); 217 | } 218 | 219 | }, 220 | 221 | (step) => { 222 | 223 | if (p.video.service) { 224 | 225 | switch (config.params.video.service) { 226 | case enums.video.services.VIMEO : vmo.video.create(gsheet.row, step); break; 227 | case enums.video.services.JWPLATFORM : jw.video.create(gsheet.row, step); break; 228 | default : log.error(enums.errors.absent_stream_service); 229 | } 230 | 231 | } else { 232 | 233 | step(); 234 | 235 | } 236 | 237 | }, 238 | 239 | function (step) { 240 | 241 | //update the single row with all relevant data 242 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) { 243 | gsheet.update_single_row(step) 244 | } else { 245 | dcQ.update_job(step) 246 | } 247 | 248 | } 249 | 250 | ], function(err) { 251 | 252 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) { 253 | 254 | log.info("\n\t\t%s\tWrote to Row [ %s ] in Worksheet [ %s ]" 255 | , emoji.get('pencil2') 256 | , row[p.fields.index] 257 | , gsheet.worksheet.title); 258 | 259 | } else { 260 | 261 | log.info("\n\t\t%s\tSaved distribution details for job [ %s ] details" 262 | , emoji.get('cd') 263 | , config.params.data.key); 264 | 265 | } 266 | 267 | step(); 268 | }) 269 | 270 | } 271 | 272 | } 273 | 274 | ], function(err) { 275 | 276 | if (err) { 277 | log.error(err); 278 | throw err; 279 | } 280 | 281 | if (config.detect_datasource(p.data.url) === enums.data.types.GOOGLE) { 282 | 283 | log.info("\n\t[ COMPLETE ]\n\n\t\t%s\tDone processing row with [ %s ] key of [ %s ]" 284 | , emoji.get('thumbsup') 285 | , config.params.fields.index 286 | , gsheet.row[p.fields.index]); 287 | 288 | } else { 289 
| 290 | log.info("\n\t[ COMPLETE ]\n\n\t\t%s\tDone processing job with key [ %s ]" 291 | , emoji.get('thumbsup') 292 | , config.params.data.key); 293 | 294 | } 295 | 296 | step(); 297 | 298 | }) 299 | 300 | }, 301 | 302 | archive_asset : function(options) { 303 | 304 | //if the file exists 305 | if (fs.existsSync(options.file)) { 306 | 307 | let file_data = fs.readFileSync(options.file); 308 | 309 | if (!file_data) { 310 | log.error(err); 311 | throw err; 312 | } 313 | 314 | // log.info("\n\t\t%s\tArchiving asset %s" 315 | // , emoji.get('ok_hand') 316 | // , file_data.toString()); 317 | 318 | let base64data = new Buffer(file_data, 'binary'); 319 | 320 | //invoke the AWS S3 actions if the storage type is S3 321 | if (config.params.storage.type == enums.storage.types.S3) { 322 | 323 | log.info("\n\t\t%s\tSending [ %s ] to Amazon S3 ..." 324 | , emoji.get('rocket') 325 | , path.parse(options.file).base); 326 | 327 | aws.asset = options.file; 328 | aws.put_obj(base64data, options.step); 329 | aws.S3_URL[options.name] = aws.download_url(options.file); 330 | 331 | } 332 | 333 | } else { 334 | 335 | log.error("\n\t\t%s\tCould not find file [ %s ]. Skipping archival process." 336 | , emoji.get('x') 337 | , options.file); 338 | 339 | step(); 340 | 341 | } 342 | 343 | 344 | } 345 | 346 | } 347 | 348 | module.exports = deploy; -------------------------------------------------------------------------------- /NodeJS/deploy-output/gsheet.js: -------------------------------------------------------------------------------- 1 | var log = require('./logger'), 2 | enums = require('./constants'), 3 | stream = require('./stream'), 4 | async = require('async'), 5 | URL = require('url'), 6 | emoji = require('node-emoji'), 7 | path = require('path'), 8 | GoogleSpreadsheet = require('google-spreadsheet'), 9 | Q = require('q'), 10 | config = require('./config'), 11 | pad = require('pad'), 12 | jw = require('./jwplatform'), 13 | vmo = require('./vimeo'), 14 | aws = require('./aws'); 15 | 16 | var gsheet = { 17 | 18 | doc : null, 19 | 20 | worksheet : {}, 21 | 22 | row : {}, 23 | 24 | parse_key : function(uri) { 25 | 26 | return uri.substring(uri.indexOf("/d/") + 3, uri.length); 27 | 28 | }, 29 | 30 | auth : function(step) { 31 | var creds = require(config.params.auth.google.creds); 32 | gsheet.doc.useServiceAccountAuth(creds, step); 33 | }, 34 | 35 | store_url : function(step) { 36 | 37 | var p = config.params; 38 | 39 | gsheet.worksheet.getCells({ 40 | 'min-row' : gsheet.row.row_idx 41 | ,'max-row' : gsheet.row.row_idx 42 | ,'min-col' : p.fields.download.pos 43 | ,'max-col' : p.fields.download.pos 44 | ,'return-empty' : true 45 | }, function(err, cells){ 46 | 47 | var dl_link = null, 48 | c = cells[0]; 49 | 50 | if (p.storage.type == enums.storage.types.S3) { 51 | 52 | var dl_link = 'https://' + (enums.aws.regions[p.storage.region]).endpoint + '/' + p.storage.bucket + '/' + p.storage.folder + '/' + path.parse(aws.asset).base; 53 | log.info("\n\t\t%s\tDownload @ %s" 54 | , emoji.get('floppy_disk') 55 | , dl_link); 56 | 57 | } else { 58 | 59 | dl_link = "Unavailable"; 60 | 61 | } 62 | 63 | gsheet.row.s3_url = dl_link; 64 | 65 | c.setValue(dl_link, step); 66 | 67 | }); 68 | 69 | }, 70 | 71 | select_sheet : function(step) { 72 | 73 | gsheet.doc.getInfo(function(err, info) { 74 | 75 | var sheet; 76 | 77 | log.info("\t\t" + pad('Google Document',25) + ' : [ ' + info.title + ' ] by ' + info.author.email); 78 | 79 | if (!config.params.data.collection) { 80 | log.error(enums.errors.absent_collection); 81 | throw new 
Error(enums.errors.absent_collection); 82 | } 83 | 84 | for (var i=0; i < info.worksheets.length; i++) { 85 | 86 | if (info.worksheets[i].title == config.params.data.collection) { 87 | sheet = info.worksheets[i]; 88 | break; 89 | } 90 | 91 | } 92 | 93 | log.info("\t\t" + pad('Worksheet', 25) + ' : [ ' + sheet.title + ' ] | ' + sheet.rowCount + ' Rows, ' + sheet.colCount + ' Columns'); 94 | 95 | gsheet.worksheet = sheet; 96 | 97 | step(); 98 | 99 | }); 100 | 101 | }, 102 | 103 | column_to_letter : function(col) { 104 | 105 | var temp, letter = ''; 106 | 107 | while (col > 0) 108 | { 109 | temp = (col - 1) % 26; 110 | letter = String.fromCharCode(temp + 65) + letter; 111 | col = (col - temp - 1) / 26; 112 | } 113 | 114 | return letter; 115 | 116 | }, 117 | 118 | store_stream_key : function(step) { 119 | 120 | gsheet.worksheet.getCells({ 121 | 'min-row' : gsheet.row.row_idx 122 | ,'max-row' : gsheet.row.row_idx 123 | ,'min-col' : config.params.fields.stream.pos 124 | ,'max-col' : config.params.fields.stream.pos 125 | ,'return-empty' : true 126 | }, (err, cells) => { 127 | 128 | async.series([ 129 | 130 | function(step) { 131 | 132 | switch (config.params.video.service) { 133 | case enums.video.services.VIMEO : vmo.video.create(gsheet.row, step); break; 134 | case enums.video.services.JW : jw.video.create(gsheet.row, step ); break; 135 | default : log.error(enums.errors.absent_stream_service); throw new Error(enums.errors.absent_stream_service); 136 | 137 | } 138 | 139 | }, 140 | 141 | function (step) { 142 | 143 | gsheet.worksheet.getCells({ 144 | 'min-row' : gsheet.row.row_idx 145 | ,'max-row' : gsheet.row.row_idx 146 | ,'min-col' : config.params.fields.stream.pos 147 | ,'max-col' : config.params.fields.stream.pos 148 | ,'return-empty' : true 149 | }, function(err, cells) { 150 | 151 | var c = cells[0], 152 | stream_key = null; 153 | 154 | c.setValue(stream.key, function(err, results) { 155 | 156 | gsheet.worksheet.getCells({ 157 | 'min-row' : gsheet.row.row_idx 158 | ,'max-row' : gsheet.row.row_idx 159 | ,'min-col' : config.params.fields.bcast.pos 160 | ,'max-col' : config.params.fields.bcast.pos 161 | ,'return-empty' : true 162 | }, function(err, cells) { 163 | 164 | var c = cells[0]; 165 | 166 | c.setValue(enums.stream.status.CREATED, function() { 167 | 168 | log.info("\n\t\t%s\t[ %s ] stream key [ %s ]" 169 | , emoji.get('key') 170 | , config.params.video.service 171 | , stream.key); 172 | 173 | log.info("\n\t\t%s\tBroadcast status [ %s ]" 174 | , emoji.get('studio_microphone') 175 | , enums.stream.status.CREATED); 176 | 177 | log.info("\n\t\t%s\t[ %s ] staged to [ %s ]" 178 | , emoji.get('timer_clock') 179 | , gsheet.row[config.params.fields.output.name] 180 | , config.params.video.service); 181 | 182 | step(); 183 | 184 | }); 185 | 186 | }) 187 | 188 | }); 189 | 190 | }); 191 | 192 | } 193 | 194 | ], (err, results) => { 195 | 196 | step(); 197 | 198 | }); 199 | 200 | }) 201 | 202 | }, 203 | 204 | print_col : function(col) { 205 | log.info("\t\t" + pad((" [ " + col.name + " ]"), 25) + " : " + pad(col.pos.toString(), 3) + " (" + col.letter + ")"); 206 | return; 207 | }, 208 | 209 | get_col_positions : function(step) { 210 | 211 | var deferred = Q.defer(); 212 | 213 | gsheet.worksheet.getCells({ 214 | 'min-row': 1, 215 | 'max-row': 1, 216 | 'min-col': 1, 217 | 'max-col': gsheet.worksheet.colCount, 218 | 'return-empty': true 219 | }, function(err, cells) { 220 | 221 | if (err) { 222 | deferred.reject(); 223 | } 224 | 225 | log.info("\t\t" + pad("Column Positions", 25)); 226 | 227 | var f = 
config.params.fields; 228 | 229 | for (var cell in cells) { 230 | 231 | var c = cells[cell]; 232 | 233 | // if the column / property exists in the fields list, then set it 234 | if (Object.keys(enums.data.fields).some((key) => { 235 | return enums.data.fields[key] === c.value 236 | })) { 237 | 238 | for (var name in f) { 239 | 240 | if (f[name].name === c.value && (name !== 'index')) { 241 | f[name].pos = c.col; 242 | f[name].letter = gsheet.column_to_letter(c.col); 243 | gsheet.print_col(f[name]); 244 | } 245 | 246 | } 247 | 248 | } 249 | 250 | } 251 | 252 | deferred.resolve(); 253 | step(); 254 | 255 | }); 256 | 257 | return deferred.promise; 258 | 259 | }, 260 | 261 | store_embed_script : function(step) { 262 | 263 | //Forumulate the embed script 264 | var p = config.params, 265 | key = jw.video.key, 266 | player = config.params.video.preview.player_key 267 | 268 | gsheet.worksheet.getCells({ 269 | 'min-row' : gsheet.row.row_idx 270 | ,'max-row' : gsheet.row.row_idx 271 | ,'min-col' : config.params.fields.embed.pos 272 | ,'max-col' : config.params.fields.embed.pos 273 | ,'return-empty' : true 274 | }, (err, cells) => { 275 | 276 | if (err) { log.error(err); throw err } 277 | 278 | var c = cells[0]; 279 | 280 | c.setValue(stream.embed(gsheet.row), step); 281 | 282 | }); 283 | 284 | }, 285 | 286 | store_bcast_preview : function(step) { 287 | 288 | var p = config.params; 289 | 290 | gsheet.worksheet.getCells({ 291 | 'min-row' : gsheet.row.row_idx 292 | ,'max-row' : gsheet.row.row_idx 293 | ,'min-col' : config.params.fields.preview.pos 294 | ,'max-col' : config.params.fields.preview.pos 295 | ,'return-empty' : true 296 | }, (err, cells) => { 297 | 298 | if (err) { log.error(err); throw err } 299 | 300 | var c = cells[0]; 301 | 302 | c.setValue(stream.preview(gsheet.row), step); 303 | 304 | }); 305 | 306 | }, 307 | 308 | store_stream_url : function(step) { 309 | 310 | var p = config.params; 311 | 312 | gsheet.worksheet.getCells({ 313 | 'min-row' : gsheet.row.row_idx 314 | ,'max-row' : gsheet.row.row_idx 315 | ,'min-col' : config.params.fields.url.pos 316 | ,'max-col' : config.params.fields.url.pos 317 | ,'return-empty' : true 318 | }, (err, cells) => { 319 | 320 | if (err) { log.error(err); throw err } 321 | 322 | var c = cells[0]; 323 | 324 | c.setValue(stream.url(), step); 325 | 326 | }); 327 | 328 | }, 329 | 330 | update_single_row : function(step) { 331 | 332 | var p = config.params; 333 | 334 | try { 335 | 336 | gsheet.row[p.fields.download.name ] = aws.S3_URL.video || 'Unavailable'; 337 | gsheet.row[p.fields.dl_poster.name ] = aws.S3_URL.poster || 'Unavailable'; 338 | gsheet.row[p.fields.dl_preview.name ] = aws.S3_URL.preview || 'Unavailable'; 339 | gsheet.row[p.fields.bcast.name ] = enums.stream.status.CREATED; 340 | gsheet.row[p.fields.stream.name ] = stream.key; 341 | gsheet.row[p.fields.preview.name ] = stream.preview(); 342 | gsheet.row[p.fields.embed.name ] = stream.embed(); 343 | gsheet.row[p.fields.url.name ] = stream.url(); 344 | 345 | // log.info("\n\t\t%s\tUpdating entire row with data\n\n%o" 346 | // , emoji.get('rocket') 347 | // , gsheet.row); 348 | 349 | log.info("\n\t\t%s\tUpdating entire row with data\n" 350 | , emoji.get('rocket')); 351 | 352 | gsheet.row.save(step); 353 | 354 | } catch (err) { 355 | 356 | log.error("\n\t\t%s\tThere was an error updating the Google Sheet with stream properties =>\n\n%o" 357 | , emoji.get('exclamation') 358 | , err); 359 | 360 | } 361 | 362 | }, 363 | 364 | get : function(step) { 365 | 366 | //BEGIN MAIN ENTRY 367 | log.info("\n\t[ 
DATASTORE ]\n"); 368 | 369 | async.series([ 370 | 371 | //Create Google Spreadsheet object 372 | function (step) { 373 | gsheet.doc = new GoogleSpreadsheet(gsheet.parse_key(config.params.data.url)); 374 | step(); 375 | }, 376 | 377 | //Authorize application to manipulate doc 378 | gsheet.auth, 379 | 380 | //Select the sheet given a key 381 | gsheet.select_sheet, 382 | 383 | //Determine position of important columns 384 | gsheet.get_col_positions 385 | 386 | ], function(err, result) { 387 | 388 | if (err) { 389 | deferred.reject(); 390 | } 391 | 392 | step(); 393 | 394 | }); 395 | 396 | } 397 | 398 | } 399 | 400 | module.exports = gsheet 401 | -------------------------------------------------------------------------------- /NodeJS/deploy-output/publish.js: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | +--------------------------------------------------------------------+ 4 | | ____ __ __ | 5 | | / __ \____ _/ /_____ ______/ /___ ___ __ | 6 | | / / / / __ `/ __/ __ `/ ___/ / __ `/ / / / | 7 | | / /_/ / /_/ / /_/ /_/ / /__/ / /_/ / /_/ / | 8 | | /_____/\__,_/\__/\__,_/\___/_/\__,_/\__, / | 9 | | Automating Digital Production /____/ | 10 | | | 11 | | | 12 | | We believe that leveraging data in the design process should | 13 | | be a playful and rewarding art. Our products make this | 14 | | possible for digital content creators. | 15 | | | 16 | | |email |web |twitter | 17 | | |support@dataclay.com |dataclay.com |@dataclay | 18 | | | 19 | | This code is provided to you for your personal or commercial | 20 | | use. However, you must abide by the terms of the MIT | 21 | | License: https://opensource.org/licenses/MIT | 22 | | | 23 | | | 24 | | Copyright 2013-2018 Dataclay, LLC | 25 | | Licensed under the MIT License | 26 | | | 27 | +--------------------------------------------------------------------+ 28 | 29 | |||||||| Description 30 | 31 | This application transcodes output created by Templater and then 32 | deploys that output to a video streaming platform. It does this using 33 | child processes that are the `transcode-after-job.js` and the 34 | `deploy-output/app.js` scripts. A single interface is here for making 35 | use of both scripts in one call. 
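   For orientation, one combined invocation might look like the sketch
   below. All paths and identifiers are placeholders; the transcoding
   flags (--input, --outdir, --outname, --dest) feed the transcode
   stage, while the remaining arguments are passed through to the
   deployment stage.

       node publish.js \
           --input /path/to/render.avi \
           --outdir /path/to/transcodes \
           --outname my-video \
           --dest /path/to/deploy-assets \
           --data_uri "https://docs.google.com/spreadsheets/d/<sheet-id>" \
           --worksheet "videos" \
           --gcreds_file ./credentials/google-service-account-creds.json \
           --stream_service "Vimeo" \
           --storage_service "S3"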
36 | 37 | */ 38 | 39 | var log = require('./logger'), 40 | enums = require('./constants'), 41 | async = require('async'), 42 | fs = require('fs'), 43 | path = require('path'), 44 | nuuid = require('node-uuid'), 45 | nopen = require('open'), 46 | nurl = require('url'), 47 | nutil = require('util'), 48 | moment = require('moment'), 49 | emoji = require('node-emoji'), 50 | config = require('./config'), 51 | pad = require('pad'), 52 | argv = require('minimist')(process.argv.slice(2)); 53 | 54 | const { spawn } = require('child_process'); 55 | 56 | log.info("\n\n------- Publishing After Effects output on [ " + moment().format('MMMM Do YYYY, h:mm:ss A') + " ] ----------------") 57 | 58 | var user_conf = {}; 59 | 60 | try { 61 | 62 | async.series([ 63 | 64 | (step) => { 65 | 66 | user_conf = { 67 | 68 | gcreds : argv.gcreds_file 69 | , jwcreds : argv.jwcreds_file 70 | , awscreds : argv.awscreds_file 71 | , ytcreds : argv.ytcreds_file 72 | , vmocreds : argv.vmocreds_file 73 | , stream_service : argv.stream_service 74 | , stream_authorize : argv.stream_authorize 75 | , stream_group : argv.stream_group 76 | , stream_privacy : argv.stream_privacy 77 | , stream_comments : argv.stream_comments 78 | , stream_download : argv.stream_download 79 | , stream_overwrite : argv.stream_overwrite 80 | , data_type : argv.data_type || enums.data.types.GOOGLE 81 | , user : argv.author 82 | , data_collection : argv.worksheet 83 | , sheet_key : argv.sheet_key 84 | , data_uri : argv.data_uri 85 | , dclay_user : argv.dclay_user 86 | , dclay_pass : argv.dclay_pass 87 | , data_index : argv.data_index 88 | , data_key : argv.data_key 89 | , start_row : argv.start_row 90 | , end_row : argv.end_row 91 | , asset_loc : argv.asset_loc 92 | , poster_frame : argv.poster_frame 93 | , poster_archive : argv.poster_archive 94 | , poster_ext : argv.poster_ext || "png" 95 | , skip_clip_archive : ((argv.skip_clip_archive ? JSON.parse(argv.skip_clip_archive) : null) || false ) 96 | , asset_name : argv.asset_name || null 97 | , asset_ext : argv.asset_ext 98 | , preview_info : { domain : argv.domain_cell, route : argv.route_cell, player_key : argv.player_cell } 99 | , player_key : argv.player_key || null 100 | , storage_type : (argv.storage_service || enums.storage.types.NONE) 101 | , storage_region : argv.s3_region 102 | , storage_bucket : argv.s3_bucket 103 | , storage_folder : argv.s3_folder 104 | , broadcast : argv.broadcast 105 | , title : argv.title 106 | , desc : argv.desc || "null" 107 | , bot_enabled : argv.bot_enabled 108 | , stream_url : argv.stream_url 109 | 110 | , input_file : path.resolve(argv.input) 111 | , outputdir : path.resolve(argv.outdir) 112 | , output : path.resolve(path.join(argv.outdir, argv.outname)) 113 | , outname : argv.outname 114 | , dest_loc : path.resolve(argv.dest) 115 | , remove_orig : ((argv.cleanup ? JSON.parse(argv.cleanup) : null) || false ) 116 | , vcodec : (argv.vcodec || 'libx264' ) 117 | , vbit : (parseInt(argv.vbit) || 2048 ) 118 | , acodec : (argv.acodec || 'ac3' ) 119 | , abit : (argv.abit || '128k' ) 120 | , file_ext : (argv.file_ext || '.mp4' ) 121 | , vcontainer : (argv.container || 'mp4' ) 122 | , pixformat : (argv.pixformat || 'yuv420p' ) 123 | , dimensions : (argv.dimensions || undefined ) 124 | , skip_preview : ((argv.skip_preview ? 
JSON.parse(argv.skip_preview) : null) || false ) 125 | 126 | , poster_time : (parseFloat(argv.poster) || 0 ) 127 | , poster_format : (argv.poster_format || 'png' ) 128 | , poster_quality : (String(argv.poster_quality) || '100' ) 129 | , poster_scale : (parseInt(argv.poster_scale) || null ) 130 | 131 | , gif_start : (argv.gif_start || 0 ) 132 | , gif_duration : (argv.gif_duration || 3 ) 133 | , gif_fps : (argv.gif_fps || 30 ) 134 | , gif_scale : (argv.gif_scale || 480 ) 135 | 136 | } 137 | 138 | log.info('\n\tPublishing Configuration\n\n%o', user_conf); 139 | 140 | step(); 141 | 142 | }, 143 | 144 | (step) => { 145 | 146 | //function to spawn `transcode-after-job.js` using captured settings above 147 | var transcode = spawn('node' 148 | , [ 149 | path.resolve(__dirname, '..', 'transcode-after-job.js'), 150 | '--input' , user_conf.input_file, 151 | '--outdir' , user_conf.outputdir, 152 | '--outname' , user_conf.outname, 153 | '--dest' , user_conf.dest_loc, 154 | '--dimensions' , user_conf.dimensions, 155 | '--skip_preview' , user_conf.skip_preview, 156 | '--poster' , user_conf.poster_time, 157 | ((user_conf.remove_orig) ? '--cleanup' : '') 158 | ] 159 | , { 160 | cwd : process.cwd() 161 | , env : process.env 162 | , stdio : ['inherit', 'inherit', 'pipe'] 163 | , encoding : 'utf-8' 164 | 165 | } 166 | ); 167 | 168 | transcode.on('exit', (code) => { 169 | console.log("Exiting transcode process with code [" + code + "]"); 170 | step(); 171 | }); 172 | 173 | }, 174 | 175 | (step) => { 176 | 177 | //function to call on deployment app with setting from interface 178 | var publish = spawn('node' 179 | , [ 180 | path.resolve(__dirname, 'app.js') 181 | , '--data_uri' , user_conf.data_uri 182 | , '--vmocreds_file' , user_conf.vmocreds 183 | , '--awscreds_file' , user_conf.awscreds 184 | , '--gcreds_file' , user_conf.gcreds 185 | , '--stream_service' , user_conf.stream_service 186 | , '--storage_service' , user_conf.storage_type 187 | , '--s3_region' , user_conf.storage_region 188 | , '--s3_bucket' , user_conf.storage_bucket 189 | , '--s3_folder' , user_conf.storage_folder 190 | , '--worksheet' , user_conf.data_collection 191 | , '--data_index' , user_conf.data_index 192 | , '--data_key' , user_conf.data_key 193 | , '--asset_loc' , user_conf.dest_loc 194 | , '--asset_ext' , user_conf.asset_ext 195 | , '--stream_privacy' , user_conf.stream_privacy 196 | , '--stream_group' , user_conf.stream_group 197 | , '--title' , user_conf.title 198 | , '--desc' , user_conf.desc 199 | , '--stream_url' , user_conf.stream_url 200 | , '--domain_cell' , user_conf.preview_info.domain 201 | , '--skip_clip_archive' 202 | ] 203 | , { 204 | cwd : process.cwd() 205 | , env : process.env 206 | , stdio : ['inherit', 'inherit', 'pipe'] 207 | , encoding : 'utf-8' 208 | 209 | } 210 | ); 211 | 212 | publish.on('exit', (code) => {step()}); 213 | 214 | } 215 | 216 | ], (err) => { 217 | 218 | log.info('\n\t[ FINISHED PUBLISHING! ]'); 219 | process.exit(); 220 | 221 | if (err) 222 | log.error(err); 223 | 224 | }) //END PUBLISHING APP ENTRY 225 | 226 | } catch (err) { 227 | 228 | log.error(err.message); 229 | 230 | } 231 | 232 | -------------------------------------------------------------------------------- /NodeJS/deploy-output/README.md: -------------------------------------------------------------------------------- 1 | # Description 2 | 3 | This application references a Templater Bot data source like a Google Sheet and then deploys Templater output video files. 
# Requirements

You must have Adobe After Effects and Templater Bot installed to use this application.

As of version 1.2.0, this application supports the following vendor services.

| Data Source Types | Cloud Storage | Video Streaming     |
|:------------------|:--------------|:--------------------|
| Google Sheets     | Amazon S3     | JWPlatform or Vimeo |

Of course, we want to support more vendor services, so we would never turn down a contribution or pull request. If you are inclined to help with this, please do not hesitate!

## Security & Authorization
As a matter of security, you must obtain the following credentials from the vendor services listed above.

1. 📀 **Datastore**
   At the moment, this application only supports Google Sheets as a data store. It will only read from and write to a single worksheet within a Sheet document. A minimal read sketch follows the steps below.
    + **Google Sheets**
        + Read more about [Google Service Accounts](https://cloud.google.com/iam/docs/understanding-service-accounts), [Getting Started with Authentication](https://cloud.google.com/docs/authentication/getting-started), and [Setting Up Authentication for Server to Server Production Applications](https://cloud.google.com/docs/authentication/production#auth-cloud-implicit-nodejs)
        + Create a new project in the [Google Cloud Platform API Console](https://cloud.google.com/) and create a Google Service Account email address
        + Download the credential file associated with your Google Service Account
        + Share any Google Sheet document with the Google Service Account email address
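    One way to verify the credentials and share settings is a short read test. This is a sketch only, assuming the `google-spreadsheet` npm package (v3); the sheet key and worksheet title are placeholders, and the app's own data layer may differ:

    ```js
    //Sketch: authenticate with a Google Service Account and read one worksheet
    const { GoogleSpreadsheet } = require('google-spreadsheet');
    const creds = require('./credentials/google-service-account-creds.json');

    async function readRows(sheetKey, worksheetTitle) {
        const doc = new GoogleSpreadsheet(sheetKey);
        await doc.useServiceAccountAuth(creds); //JWT auth as the service account
        await doc.loadInfo();                   //load document properties and worksheets
        const sheet = doc.sheetsByTitle[worksheetTitle];
        return sheet.getRows();                 //one object per data row
    }

    readRows('<your-sheet-key>', 'Sheet1').then((rows) => console.log(rows.length + ' rows'));
    ```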
2. 📡 **Video Streaming Platform**
   As of version 1.2, only two video streaming platforms are supported: Vimeo and JWPlatform. YouTube support is coming shortly, but feel free to add it yourself and issue a pull request 😄. A minimal upload sketch follows the steps below.
    + **Vimeo**
        + [Register for a user account on Vimeo](https://vimeo.com/). A Vimeo Pro account is recommended.
        + Create an app in the [Vimeo Developer area](https://developer.vimeo.com/) and note the client identifier, client secret, and access token that Vimeo generates for it.
    + **JWPlatform**
        + [Register for a user account on JWPlatform](https://developer.jwplayer.com/)
        + Find your account's API Key and Secret in your Account dashboard, under the "Account" area
        + Create a video player on JWPlatform, and note the hash id of the player you created.
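    As a sanity check for Vimeo credentials, the following sketch uploads a single file. It assumes the official `vimeo` Node library and a personal access token; all identifiers and paths are placeholders:

    ```js
    //Sketch: upload one file to Vimeo with the official `vimeo` library
    const Vimeo = require('vimeo').Vimeo;

    const client = new Vimeo('<client-id>', '<client-secret>', '<access-token>');

    client.upload(
        './my-video.mp4',
        { name : 'My Templater Render', description : 'Uploaded by a test script' },
        (uri) => console.log('Uploaded to ' + uri),          //completion: the new video's URI
        (bytesUploaded, bytesTotal) =>                       //progress
            console.log(((bytesUploaded / bytesTotal) * 100).toFixed(1) + '%'),
        (error) => console.error('Upload failed: ' + error)  //failure
    );
    ```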
3. 🗄 **Storage Platform**
    + **Amazon Simple Storage Service**
        + [Install and configure the AWS SDK](https://docs.aws.amazon.com/cli/latest/userguide/installing.html) on any machine that runs this application
        + Retrieve your AWS IAM user's [*Access Key ID* and *Secret Access Key*](https://console.aws.amazon.com/iam/)
        + [Create an S3 bucket](https://docs.aws.amazon.com/AmazonS3/latest/user-guide/create-bucket.html) and a folder within that bucket, both of which must grant write permission to your IAM user account.
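    To confirm that the bucket policy and credentials work, a single test upload is enough. A sketch, assuming the AWS SDK for JavaScript (v2) is installed and configured; bucket, key, and file names are placeholders:

    ```js
    //Sketch: upload one rendered file to S3 with the AWS SDK for JavaScript (v2)
    const fs = require('fs');
    const AWS = require('aws-sdk');

    const s3 = new AWS.S3({ region : 'us-east-1' }); //credentials come from the configured SDK environment

    s3.upload({
        Bucket : '<your-bucket>',
        Key    : '<your-folder>/my-video.mp4',       //the "folder" is a key prefix
        Body   : fs.createReadStream('./my-video.mp4')
    }, (err, data) => {
        if (err) return console.error(err);
        console.log('Stored at ' + data.Location);   //the kind of URL written to the `s3-link` column
    });
    ```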
4. 🎖 **Credential Files**
   Fill in or replace the credential files for specific vendor services *if you need them*. You can find placeholder files in this repository.
    + `google-service-account-creds.json`, which can be downloaded from the Google Cloud Platform Console under your Google Service Accounts.
    + `aws-credentials.json`, which you can modify with your AWS IAM *Access Key ID* and *Secret Access Key*.
    + `templater-uploader-vimeo.json`, which you can modify with the credentials found in the [Vimeo Developer area](http://developer.vimeo.com). The values are generated after you create a Vimeo app.
    + `jwplatform-credentials.json`, which you can modify with your JWPlatform account username, API key, and API secret.

## Datastore Setup
5. **Google Sheet**
    + Share the Google Sheet document that Templater will be using as a data source with the Google Service Account email address issued to you when you created the Google Service Account on the Google Cloud Platform API console. The email address might look like the following: `my-video-uploader@mycompany.iam.gserviceaccount.com`
    + Add the following columns to the Google Sheet worksheet that is mapped to the elements within the After Effects template.

    + `output` — if you don't already have one, create an output column and populate each cell in this column with the name of the file that you want for each specific video. This application uses the names in these cells when locating rendered files on disk. *Each value in this column must be unique!*
    + `s3-link` — the cells under this column store URLs to the assets uploaded to your S3 bucket, if you choose to have the assets stored there. S3 is great for archival purposes even if you also use a video streaming platform.
    + `broadcast-status` — the cells under this column store the status of the stream on the video streaming platform. The only value that this application currently inserts into these cells is `created`.
    + `stream-key` — the cells under this column store a universally unique identification string generated by the streaming platform when a video asset is uploaded to it.
    + `broadcast-preview` — the cells under this column will store URLs to a screening page. You may or may not have a screening page set up, so the values in this column may not be accurate.
    + `embed-script` — the cells under this column will store HTML `