├── .nvmrc ├── .npmrc ├── .gitignore ├── __tests__ ├── logs │ ├── sample-unsorted.log │ ├── sample-info.log │ ├── sample-customwarn.log │ ├── sample-warn.log │ ├── sample-error.log │ └── sample-jobs.log └── logparser.test.ts ├── webdav-permissions-sample.json ├── tsconfig.json ├── log.conf-sample.json ├── .eslintrc.json ├── src ├── lib │ ├── types.ts │ ├── logemitter.ts │ ├── logfluent.ts │ ├── logger.ts │ ├── logparser.ts │ └── logfetcher.ts └── cctail.ts ├── LICENSE ├── CHANGELOG.md ├── package.json └── README.md /.nvmrc: -------------------------------------------------------------------------------- 1 | 16.16.0 -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | engine-strict=true 2 | save-exact=true -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .history 3 | node_modules 4 | log.conf.js 5 | log.conf.json 6 | package-lock.json 7 | dist/* 8 | sandbox.txt 9 | -------------------------------------------------------------------------------- /__tests__/logs/sample-unsorted.log: -------------------------------------------------------------------------------- 1 | [2019-07-15 10:51:55.088 GMT] two 2 | [2019-07-15 10:50:55.088 GMT] one 3 | [2019-07-15 12:16:54.458 GMT] four 4 | [2019-07-15 12:16:53.459 GMT] three -------------------------------------------------------------------------------- /webdav-permissions-sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "clients": [ 3 | { 4 | "client_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", 5 | "permissions": [ 6 | { 7 | "path": "/logs", 8 | "operations": [ 9 | "read_write" 10 | ] 11 | } 12 | ] 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- 
/__tests__/logs/sample-info.log: -------------------------------------------------------------------------------- 1 | [2019-07-15 10:58:16.770 GMT] INFO PipelineCallServlet|1591333724|Sites-ACME_EU-Site|Adyen-Notify|PipelineCall|BPiATxU_Lc custom [] .*#GET#TOP <-> Adyen-Notify#POST#TOP 2 | [2019-07-15 11:00:07.235 GMT] INFO PipelineCallServlet|1692371210|Sites-ACME_EU-Site|Adyen-Notify|PipelineCall|n6ZMYOIbYm custom [] Adyen-Notify#POST#TOP -------------------------------------------------------------------------------- /__tests__/logs/sample-customwarn.log: -------------------------------------------------------------------------------- 1 | [2019-07-15 11:17:01.622 GMT] WARN SystemJobThread|609273395|IndexOrderChangeLogEvents|SearchSvcIndexChangeLog-Order com.demandware.component.search3.index.delta.impl.IndexChangeLogMgrImpl system JOB 081647d1b5 2d6c712cdec4515210ed29178c 1705178826222943232 - Unable to process ORDER index change log as change log is not enabled 2 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compileOnSave": true, 3 | "compilerOptions": { 4 | "module": "CommonJS", 5 | "target": "ES2017", 6 | "noImplicitAny": true, 7 | "removeComments": false, 8 | "declaration": true, 9 | "preserveConstEnums": true, 10 | "resolveJsonModule": true, 11 | "esModuleInterop" : true, 12 | "sourceMap": false, 13 | "outDir": "dist" 14 | }, 15 | "include": ["src"] 16 | } 17 | -------------------------------------------------------------------------------- /log.conf-sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "profiles": { 3 | "capiprofile": { 4 | "hostname": "dev01.mysandbox.demandware.net", 5 | "client_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", 6 | "client_secret": "client-secret", 7 | "polling_interval": 30 8 | }, 9 | "bmprofile": { 10 | "hostname": 
"dev02.anotherbox.demandware.net", 11 | "username": "me@company.com", 12 | "password": "my_really_secret_password", 13 | "polling_interval": 30 14 | } 15 | }, 16 | "interactive": true, 17 | "fluent": { 18 | "enabled": false 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /__tests__/logs/sample-warn.log: -------------------------------------------------------------------------------- 1 | first line of log with missing info 2 | [2019-07-15 10:50:55.088 GMT] WARN PipelineCallServlet|1692371210|Adyen-Notify|PipelineCall|RrCsCHDvb2 custom [] multiline start 3 | multiline second line 4 | multiline third line 5 | [2019-07-15 10:50:55.088 GMT] WARN PipelineCallServlet|1692371210|Adyen-Notify|PipelineCall|RrCsCHDvb2 custom [] .*#GET#TOP <-> Adyen-Notify#POST#TOP 6 | [2019-07-15 11:16:53.458 GMT] WARN PipelineCallServlet|899141122|Adyen-Notify|PipelineCall|GUfhepk_2C custom [] Adyen-Notify#POST#TOP 7 | [2019-07-15 11:16:53.459 GMT] WARN PipelineCallServlet|899141122|Adyen-Notify|PipelineCall|GUfhepk_2C custom [] .*#GET#TOP <-> Adyen-Notify#POST#TOP 8 | last line -------------------------------------------------------------------------------- /__tests__/logs/sample-error.log: -------------------------------------------------------------------------------- 1 | [2019-07-15 05:46:01.650 GMT] ERROR SystemJobThread|196163799|sfcc-download-gdpr-einstein-response-files-for-einstein|DownloadGDPRLogFiles com.demandware.component.transaction.cquotient.feed.s3.S3Store Sites-Site JOB 120bb2571b 7868c495b3b052e054eea9b3b8 1905512992805178368 - AWS S3 access key, secret key, or bucket name is not setup yet 2 | System Information 3 | ------------------ 4 | RequestID: 7868c495b3b052e054eea9b3b8 5 | SessionType: JOB 6 | Truncated SessionID: 120bb2571b 7 | ServerName: 8 | ServerPort: -1 9 | 10 | Request Information 11 | ------------------- 12 | URI: 13 | Method: 14 | PathInfo: 15 | QueryString: 16 | Remote User: 17 | 18 | Request Parameters 
19 | ------------------ 20 | 21 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["airbnb-base", "prettier"], 3 | "env": { 4 | "browser": false 5 | }, 6 | "plugins": ["jest"], 7 | "rules": { 8 | "no-multiple-empty-lines": [ 9 | "error", 10 | { 11 | "max": 2 12 | } 13 | ], 14 | "padded-blocks": ["error", "never"], 15 | "max-len": [ 16 | "off", 17 | { 18 | "code": 120, 19 | "tabWidth": 2 20 | } 21 | ], 22 | "no-var": "warn", 23 | "prefer-arrow-callback": "warn", 24 | "no-unused-vars": ["error", { "args": "none" }], 25 | "consistent-return": "warn", 26 | "no-plusplus": "off", 27 | "prefer-destructuring": "off", 28 | "no-use-before-define": "off", 29 | "comma-dangle": "off", 30 | "no-console": "off", 31 | "prefer-const": "off", 32 | "no-param-reassign": "off", 33 | "guard-for-in": "off", 34 | "no-await-in-loop": "off", 35 | "no-restricted-syntax": "off", 36 | "vars-on-top": "off", 37 | "object-shorthand": "off", 38 | "wrap-iife": "off", 39 | "func-names": "off" 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/lib/types.ts: -------------------------------------------------------------------------------- 1 | import moment from 'moment'; 2 | 3 | export interface DwJson { 4 | hostname: string, 5 | client_id?: string, 6 | client_secret?: string, 7 | username?: string, 8 | password?: string, 9 | log_types?: string[] 10 | polling_interval?: number, 11 | refresh_loglist_interval?: number, 12 | token?: string, 13 | token_type?: string, 14 | token_expiry?: moment.Moment 15 | } 16 | 17 | export interface LogFile { 18 | log: string, 19 | size_string: string, 20 | date: moment.Moment, 21 | size?: number, 22 | debug: boolean 23 | } 24 | 25 | export interface LogLine { 26 | message: string, 27 | level: string, 28 | timestamp: moment.Moment, 29 | logfile?: string 30 | } 31 | 32 | 
export interface LogConfig { 33 | profiles: Profiles, 34 | fluent?: FluentConfig, 35 | interactive?: boolean, 36 | } 37 | 38 | export interface Profiles { 39 | [name: string]: DwJson 40 | } 41 | 42 | export interface FluentConfig { 43 | enabled: boolean, 44 | host?: string, 45 | port?: number, 46 | reconnect_interval?: number, 47 | timeout?: number, 48 | tag_prefix?: string 49 | } 50 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 openmindlab 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /src/lib/logemitter.ts: -------------------------------------------------------------------------------- 1 | import moment from 'moment'; 2 | import s from 'underscore.string'; 3 | import logger from './logger'; 4 | import { LogLine } from './types'; 5 | 6 | const logemitter = { 7 | 8 | sort: function (logs: LogLine[]): LogLine[] { 9 | return logs.sort((a, b) => (a.timestamp || moment('1900-01-01')).valueOf() - (b.timestamp || moment('1900-01-01')).valueOf()); 10 | }, 11 | 12 | output: function (logs: LogLine[], printnots: boolean, debug: boolean): void { 13 | if (logs.length === 0) { 14 | logger.log(logger.debug, '.', debug); 15 | } 16 | 17 | for (let j = 0; j < logs.length; j++) { 18 | let log = logs[j]; 19 | 20 | if (!printnots && !log.timestamp) { 21 | // eslint-disable-next-line no-continue 22 | continue; 23 | } 24 | 25 | let message = ''; 26 | if (log.timestamp) { 27 | message = `${log.timestamp.local().format('YYYY-MM-DD HH.mm.ss.SSS ZZ')} `; 28 | } 29 | if (log.level) { 30 | message += `${s.rpad(log.level, 5)} `; 31 | } 32 | 33 | message += log.message; 34 | logger.log(log.level, message); 35 | } 36 | } 37 | } 38 | 39 | export default logemitter; 40 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## Change Log 2 | 3 | ### v1.6.0 (2022/08/22) 4 | 5 | - upgrade dependencies - switch to node 16 by default 6 | 7 | ### v1.5.1 (2021/01/08) 8 | 9 | - Support for environment variables for use in profiles. 10 | 11 | ### v1.5.0 (2021/01/06) 12 | 13 | - Improved stability, re-authentication is more reliable now. 
14 | 15 | ### v1.4.0 (2020/09/18) 16 | 17 | - Support for FluentD as a log collector 18 | - Headless mode - tail logs based on config file 19 | - Added Business Manager as an authentication method 20 | - Standardized console.log color outputs & debug logging 21 | 22 | ### v1.3.2 (2020/08/27) 23 | 24 | - Update dependencies 25 | - Fix parsing of numeric arguments (when using something like 001 as an environment name) 26 | 27 | ### v1.3.0 (2020/07/27) 28 | 29 | - Improved retry in case of expired auth. 30 | 31 | ### v1.2.0 (2020/04/26) 32 | 33 | - Update dependencies, migrated to typescript. 34 | 35 | ### v1.1.3 (2019/11/29) 36 | 37 | - Handling of PIG instances behind cloudflare, even if we are not getting content-length headers here. 38 | 39 | ### v1.1.2 (2019/08/22) 40 | 41 | - Better sorting of same-second log entries. 42 | 43 | ### v1.1.1 (2019/08/15) 44 | 45 | - Brighter, more readable log colors. 46 | 47 | ### v1.1.0 (2019/08/04) 48 | 49 | - Remove dependency on sfcc-ci for authentication. No more salesforce private deps, feeling a bit more free and snappy now. 50 | - Automatically refresh access token for your long tailing sessions 51 | - Better handling of hanging connections, now they will timeout sooner so we can retry without loosing too much time. 
52 | 53 | ### v1.0.0 (2019/07/20) 54 | 55 | - First public release 56 | -------------------------------------------------------------------------------- /src/lib/logfluent.ts: -------------------------------------------------------------------------------- 1 | import logger from './logger'; 2 | import { FluentConfig, LogLine } from './types'; 3 | const fluent = require('fluent-logger'); 4 | 5 | class LogFluent implements FluentConfig { 6 | enabled: boolean; 7 | host: string; 8 | port: number; 9 | reconnect_interval: number; 10 | timeout: number; 11 | tag_prefix: string; 12 | 13 | constructor(fConfig: FluentConfig) { 14 | this.enabled = fConfig.enabled; 15 | this.host = fConfig.host || 'localhost'; 16 | this.port = fConfig.port || 24224; 17 | this.reconnect_interval = fConfig.reconnect_interval || 600; 18 | this.timeout = fConfig.timeout || 3.0; 19 | this.tag_prefix = fConfig.tag_prefix || 'sfcc'; 20 | 21 | fluent.configure(this.tag_prefix, { 22 | host: this.host, 23 | port: this.port, 24 | timeout: this.timeout, 25 | reconnectInterval: this.reconnect_interval * 1000 26 | }); 27 | } 28 | 29 | output(hostname: string, logs: LogLine[], printnots: boolean, debug: boolean) { 30 | logger.log(logger.debug, `Sending ${logs.length} new log records to FluentD this interval.`, debug); 31 | 32 | for (let j = 0; j < logs.length; j++) { 33 | let log = logs[j]; 34 | 35 | if (!printnots && !log.timestamp) { 36 | // eslint-disable-next-line no-continue 37 | continue; 38 | } 39 | 40 | try { 41 | fluent.emit(log.logfile.substr(0, log.logfile.indexOf('-')), { 42 | logfile: log.logfile, 43 | level: log.level, 44 | message: log.message.trim(), 45 | hostname: hostname 46 | }, fluent.EventTime.fromTimestamp(log.timestamp.valueOf())); 47 | } catch (err) { 48 | logger.log('error', 'Send to FluentD failed with error: ' + err); 49 | } 50 | } 51 | } 52 | } 53 | 54 | export default LogFluent; 55 | -------------------------------------------------------------------------------- /package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "cctail", 3 | "version": "1.6.0", 4 | "description": "Salesforce Commerce Cloud logs remote tail", 5 | "keywords": [ 6 | "Salesforce Commerce Cloud", 7 | "Demandware" 8 | ], 9 | "repository": { 10 | "type": "git", 11 | "url": "https://github.com/openmindlab/cctail.git" 12 | }, 13 | "bugs": { 14 | "url": "https://github.com/openmindlab/cctail/issues" 15 | }, 16 | "main": "dist/cctail.js", 17 | "license": "MIT", 18 | "author": { 19 | "name": "Fabrizio Giustina" 20 | }, 21 | "bin": { 22 | "cctail": "dist/cctail.js" 23 | }, 24 | "scripts": { 25 | "build": "tsc", 26 | "test": "jest", 27 | "postversion": "git push && git push --tags", 28 | "prepare": "npm run build" 29 | }, 30 | "files": [ 31 | "dist/**/*", 32 | "README.md", 33 | "LICENSE", 34 | "log.conf-sample.json", 35 | "webdav-permissions-sample.json", 36 | "CHANGELOG.md" 37 | ], 38 | "engines": { 39 | "npm": ">=7.0.0", 40 | "node": ">=16.0.0" 41 | }, 42 | "dependencies": { 43 | "axios": "0.27.2", 44 | "colorette": "2.0.19", 45 | "fluent-logger": "^3.4.1", 46 | "moment": "2.29.4", 47 | "prompts": "2.4.2", 48 | "underscore.string": "3.3.6", 49 | "yargs": "17.5.1" 50 | }, 51 | "devDependencies": { 52 | "@types/jest": "28.1.7", 53 | "@types/node": "18.7.9", 54 | "@types/prompts": "2.0.14", 55 | "@types/underscore.string": "0.0.38", 56 | "@typescript-eslint/parser": "5.33.1", 57 | "eslint": "8.22.0", 58 | "eslint-config-airbnb-base": "15.0.0", 59 | "eslint-config-prettier": "8.5.0", 60 | "eslint-plugin-import": "2.26.0", 61 | "eslint-plugin-jest": "26.8.7", 62 | "eslint-plugin-mocha": "10.1.0", 63 | "jest": "28.1.3", 64 | "prettier-eslint": "15.0.1", 65 | "ts-jest": "28.0.8", 66 | "typescript": "4.7.4" 67 | }, 68 | "jest": { 69 | "testPathIgnorePatterns": [ 70 | "/node_modules/", 71 | ".history/" 72 | ], 73 | "preset": "ts-jest" 74 | } 75 | } 76 | 
-------------------------------------------------------------------------------- /__tests__/logparser.test.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | import util from 'util'; 3 | import path from 'path'; 4 | import logparser from '../src/lib/logparser'; 5 | import logemitter from '../src/lib/logemitter'; 6 | 7 | const readFile = util.promisify(fs.readFile); 8 | 9 | let jobcontent:string; 10 | let files: Promise[]; 11 | 12 | beforeAll(() => { 13 | jobcontent = fs.readFileSync('__tests__/logs/sample-jobs.log', 'UTF-8'); 14 | files = fs 15 | .readdirSync('__tests__/logs').map(name => { return readFile(path.join('__tests__/logs', name), 'UTF-8') }); 16 | }); 17 | 18 | test('correct number of items when parsing a single file', () => { 19 | let result = logparser.parseLog(fs.readFileSync('__tests__/logs/sample-warn.log', 'UTF-8')); 20 | expect(result.length).toBe(5); 21 | }); 22 | 23 | test('correct number of items when parsing job log file without level', () => { 24 | let result = logparser.parseLog(jobcontent); 25 | expect(result.length).toBe(10); 26 | }); 27 | 28 | test('message content is right', () => { 29 | let result = logparser.parseLog(fs.readFileSync('__tests__/logs/sample-warn.log', 'UTF-8')); 30 | expect(result[0].message).toBe('first line of log with missing info'); 31 | expect(result[1].message).toBe('PipelineCallServlet|1692371210|Adyen-Notify|PipelineCall|RrCsCHDvb2 custom [] multiline start\nmultiline second line\nmultiline third line'); 32 | expect(result[2].message).toBe('PipelineCallServlet|1692371210|Adyen-Notify|PipelineCall|RrCsCHDvb2 custom [] .*#GET#TOP <-> Adyen-Notify#POST#TOP'); 33 | expect(result[4].message).toBe('PipelineCallServlet|899141122|Adyen-Notify|PipelineCall|GUfhepk_2C custom [] .*#GET#TOP <-> Adyen-Notify#POST#TOP\nlast line'); 34 | }); 35 | 36 | 37 | test('logs are sorted', () => { 38 | let result = 
logemitter.sort(logparser.parseLog(fs.readFileSync('__tests__/logs/sample-unsorted.log', 'UTF-8'))); 39 | expect(result[0].message).toBe('one'); 40 | expect(result[1].message).toBe('two'); 41 | expect(result[2].message).toBe('three'); 42 | expect(result[3].message).toBe('four'); 43 | }); 44 | 45 | 46 | test('parse multiple files', async () => { 47 | let parsed = await logparser.process(files); 48 | expect(parsed.length).toBe(23); 49 | }); 50 | 51 | -------------------------------------------------------------------------------- /src/lib/logger.ts: -------------------------------------------------------------------------------- 1 | import { blue, blueBright, cyan, cyanBright, green, greenBright, magenta, magentaBright, red, redBright, yellow, yellowBright } from 'colorette'; 2 | const { log } = console; 3 | 4 | const logger = { 5 | 6 | debugPrefix: '*** ', 7 | 8 | debug: 'debug', 9 | error: 'error', 10 | fatal: 'fatal', 11 | info: 'info', 12 | jobs: 'jobs', 13 | profile: 'PROFILE', 14 | warn: 'warn', 15 | 16 | colormap: { 17 | DEBUG: cyanBright, 18 | debug: cyan, 19 | ERROR: redBright, 20 | error: red, 21 | FATAL: redBright, 22 | fatal: red, 23 | INFO: greenBright, 24 | info: green, 25 | JOBS: blueBright, 26 | jobs: blue, 27 | PROFILE: magentaBright, 28 | profile: magenta, 29 | WARN: yellowBright, 30 | warn: yellow 31 | }, 32 | 33 | log: function (level: string, text: string, debug?: boolean) { 34 | if (level !== this.debug) { 35 | log(this.colorize(level, text)); 36 | } else if (debug) { 37 | log(this.colorize(level, this.debugPrefix + text)); 38 | } 39 | }, 40 | 41 | colorize: function (level: string, text: string) { 42 | if (!level || level.length === 0) { 43 | return text; 44 | } 45 | 46 | let color = this.colormap[level]; 47 | if (!color) { 48 | switch (true) { 49 | case this.checkFor(level, "info"): 50 | color = this.colormap["info"]; 51 | break; 52 | case this.checkFor(level, "warn"): 53 | color = this.colormap["warn"]; 54 | break; 55 | case 
this.checkFor(level, "error"): 56 | color = this.colormap["error"]; 57 | break; 58 | case this.checkFor(level, "fatal"): 59 | color = this.colormap["fatal"]; 60 | break; 61 | case this.checkFor(level, "jobs"): 62 | color = this.colormap["jobs"]; 63 | break; 64 | case this.checkFor(level, "debug"): 65 | color = this.colormap["debug"]; 66 | break; 67 | case this.checkFor(level, "profile"): 68 | color = this.colormap["profile"]; 69 | break; 70 | default: 71 | break; 72 | } 73 | } 74 | 75 | if (color) { 76 | return color(text); 77 | } else { 78 | return text; 79 | } 80 | }, 81 | 82 | checkFor: function (input: string, term: string) { 83 | return (input.indexOf(term) != -1); 84 | } 85 | } 86 | 87 | export default logger 88 | -------------------------------------------------------------------------------- /__tests__/logs/sample-jobs.log: -------------------------------------------------------------------------------- 1 | [2019-07-15 07:43:01.613 GMT] [Locking] Thread 'SystemJobThread|71405794|sfcc-process-expired-servers[115]' successfully acquired lock 'jobconfiguration/sites/sfcc-process-expired-servers/0bbba1e69ac93ffab9f5e37511'. Lock Owner Identification: ['SystemJobThread|71405794|sfcc-process-expired-servers[115]','null',2019-07-15T07:43:01.000+0000,9999-12-31T00:00:00.000+0000,null,476f9b4340ca43da437404a50f:10.0.18.205:10182,485b60421f7f4f55fa7bd64df5,TRANSIENT,READLOCK] 2 | [2019-07-15 07:43:01.614 GMT] Executing job [sfcc-process-expired-servers]. ThreadID: SystemJobThread|71405794|sfcc-process-expired-servers[115] 3 | [2019-07-15 07:43:01.641 GMT] Job [sfcc-process-expired-servers] - Executing step [ProcessExpiredServers]... ThreadID: SystemJobThread|71405794|sfcc-process-expired-servers|ProcessExpiredServers[115] 4 | [2019-07-15 07:43:01.648 GMT] Job [sfcc-process-expired-servers] - Execution of step [ProcessExpiredServers] finished with status [OK]. 
ThreadID: SystemJobThread|71405794|sfcc-process-expired-servers|ProcessExpiredServers[115] 5 | [2019-07-15 07:43:01.654 GMT] Job [sfcc-process-expired-servers] - Execution of job finished with status [OK]. ThreadID: SystemJobThread|71405794|sfcc-process-expired-servers[115] 6 | [2019-07-15 07:43:01.655 GMT] Job [sfcc-inventory-pending_reservations-handle-orphaned] - Executing step [HandleOrphanedPendingReservationsJobStep]... ThreadID: SystemJobThread|609273395|sfcc-inventory-pending_reservations-handle-orphaned|HandleOrphanedPendingReservationsJobStep[128] 7 | [2019-07-15 07:43:01.656 GMT] [Locking] Thread 'SystemJobThread|71405794|sfcc-process-expired-servers[115]' successfully released lock 'jobconfiguration/sites/sfcc-process-expired-servers/0bbba1e69ac93ffab9f5e37511'. 8 | [2019-07-15 07:43:01.659 GMT] Job [sfcc-inventory-pending_reservations-handle-orphaned] - Execution of step [HandleOrphanedPendingReservationsJobStep] finished with status [OK]. ThreadID: SystemJobThread|609273395|sfcc-inventory-pending_reservations-handle-orphaned|HandleOrphanedPendingReservationsJobStep[128] 9 | [2019-07-15 07:43:01.666 GMT] Job [sfcc-inventory-pending_reservations-handle-orphaned] - Execution of job finished with status [OK]. ThreadID: SystemJobThread|609273395|sfcc-inventory-pending_reservations-handle-orphaned[128] 10 | [2019-07-15 07:43:01.668 GMT] [Locking] Thread 'SystemJobThread|609273395|sfcc-inventory-pending_reservations-handle-orphaned[128]' successfully released lock 'jobconfiguration/sites/sfcc-inventory-pending_reservations-handle-orphaned/272500ba93a6e98b1001ee1a0b'. 
-------------------------------------------------------------------------------- /src/lib/logparser.ts: -------------------------------------------------------------------------------- 1 | import moment from 'moment'; 2 | import logger from './logger'; 3 | import { LogFile, LogLine } from './types'; 4 | 5 | const logparser = { 6 | process: async function (files: Promise<[LogFile, string]>[]): Promise { 7 | return Promise.all(files).then((values) => { 8 | return values.map((data) => { 9 | if (data[0].log.endsWith(".csv")) 10 | return this.parseCsv(data); 11 | else 12 | return this.parseLog(data); 13 | }).reduce((a, b) => { 14 | if (a.length === 0) 15 | return b; 16 | return a.concat(b); 17 | }); 18 | }); 19 | }, 20 | 21 | parseCsv: function (logdata: [LogFile, string]): LogLine[] { 22 | let logfile = logdata[0].log.replace("/", "-"); 23 | let timestamp = logdata[0].date; 24 | let data = logdata[1]; 25 | let linesobj: LogLine[] = []; 26 | let regexp = new RegExp((/([^\n]+)/g)); 27 | let mmatch; 28 | 29 | while (mmatch = regexp.exec(data)) { 30 | let start = mmatch.index; 31 | let end = start + mmatch[0].length; 32 | // Start > 0 guarantees we skip the first line, which is the CSV header. 33 | if (start > 0 && end > 0) { 34 | linesobj.push({ 35 | message: data.substring(start, end), 36 | timestamp: timestamp, 37 | level: logger.profile, 38 | logfile: logfile 39 | }); 40 | } 41 | } 42 | return linesobj; 43 | }, 44 | 45 | parseLog: function (logdata: [LogFile, string]): LogLine[] { 46 | let logfile = logdata[0].log; 47 | let data = logdata[1]; 48 | let linesobj: LogLine[] = []; 49 | let regexp = new RegExp((/\[([.0-9 .:-]*) GMT\] (DEBUG|INFO|WARN|ERROR|FATAL)? 
?(.*)/g)); 50 | regexp.lastIndex = 0; 51 | let lastmatchend = 0; 52 | let mmatch; 53 | // eslint-disable-next-line no-cond-assign 54 | while (mmatch = regexp.exec(data)) { 55 | let start = mmatch.index; 56 | let end = start + mmatch[0].length; 57 | if (start > 0 && linesobj.length === 0) { 58 | linesobj.push({ 59 | message: data.substring(0, start - 1), 60 | timestamp: undefined, 61 | level: undefined, 62 | logfile: logfile 63 | }); 64 | } 65 | else if (start > (lastmatchend + 1)) { 66 | // append extra lines to the previous log 67 | linesobj[linesobj.length - 1].message += `${data.substring(lastmatchend, start - 1)}`; 68 | } 69 | // 2019-07-15 11:00:07.235 70 | let timestamp = moment.utc(mmatch[1], "YYYY-MM-DD HH:mm:ss.SSS"); 71 | let level = mmatch[2] || 'INFO'; 72 | let message = mmatch[3]; 73 | lastmatchend = end; 74 | linesobj.push( 75 | { 76 | timestamp, 77 | level, 78 | message, 79 | logfile 80 | } 81 | ); 82 | } 83 | if (linesobj.length > 0 && lastmatchend < data.length) { 84 | linesobj[linesobj.length - 1].message += `${data.substring(lastmatchend, data.length)}`; 85 | } 86 | if (lastmatchend === 0 && data && data.length > 0) { // no match 87 | linesobj.push({ 88 | message: data, 89 | timestamp: undefined, 90 | level: undefined, 91 | logfile: logfile 92 | }); 93 | } 94 | return linesobj; 95 | } 96 | } 97 | 98 | export default logparser; 99 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Salesforce Commerce Cloud log tail 2 | 3 | > Remote tail Salesforce Commerce Cloud logs via webdav. Allow to monitor more logs at once, merging the content of all the files in a single stream. Reports either to console or a FluentD collector. 4 | 5 | ## Features 6 | 7 | - Authentication using API Client _(recommended)_ OR Business Manager _(deprecated)_ 8 | - Interactive prompt for logs selection OR selection of logs by config file. 
9 | - Supports configuration of multiple instances OR standard dw.json config file 10 | - Outputs to console OR FluentD collector 11 | - Multiple log tailing, with merging of log entries 12 | - In Console mode: 13 | - Sorts log entries by timestamp 14 | - Colors output based on log levels 15 | - Converts log timestamp to local timezone 16 | 17 | ## Installation 18 | 19 | ```bash 20 | $ npm i -g cctail 21 | ``` 22 | 23 | ## Requirements 24 | 25 | - Node >= 10 26 | 27 | ## Configuration 28 | 29 | Requires one of the following configuration files: 30 | 31 | - a `log.conf.json` file with multiple environments configured. This may be used if you want to easily switch between multiple instances 32 | - a standard `dw.json` file, tipically pointing to your working sandbox. 33 | 34 | `cctail` requires a correctly configured API client id/secret OR Business Manager username/password for accessing logs via webdav. **API client authentication is recommended, because it is faster after the initial authorization, and Business Manager authentication to WebDAV has been _deprecated_ by SalesForce.** 35 | 36 | ### Optional Configurations 37 | 38 | - `"profiles"`: 39 | - Standard log types: `analytics`, `api`, `console`, `customdebug`, `customerror`, `customfatal`, `custominfo`, `customwarn`, `dbinit-sql`, `debug`, `deprecation`, `error`, `fatal`, `info`, `jobs`, `migration`, `performance`, `quota`, `sql`, `staging`, `sysevent`, `syslog`, `warn` 40 | - `"log_types": ["log", "types", "array"]` _(default: all log types)_ - In non-interactive mode, defining this will limit the log types that cctail collects to this list. 
41 | - Standard log types: `analytics`, `api`, `codeprofiler`, `console`, `customdebug`, `customerror`, `customfatal`, `custominfo`, `customwarn`, `dbinit-sql`, `debug`, `deprecation`, `error`, `fatal`, `info`, `jobs`, `migration`, `performance`, `quota`, `sql`, `staging`, `sysevent`, `syslog`, `warn` 42 | - `"polling_interval": nnn` _(default: `3`)_ - Frequency (seconds) with which cctail will poll the logs. 43 | - If you are using non-interactive mode to pipe the logs elsewhere (i.e. FluentD), a longer interval is recommended (i.e. 30 or 60). 44 | - `"refresh_loglist_interval": nnn` _(default: `600`)_ - In non-interactive mode, this is the frequency (seconds) in which cctail will check the WebDAV server for new logs that match your `log_types` criteria. 45 | - `"interactive": true|false` _(default: `true`)_ - Interactive mode asks which logs you will want to tail. If `false`, cctail will tail all of today's logs by default. 46 | 47 | #### FluentD 48 | **NOTE:** All configurations for fluentD are _optional_, except `enabled` must be set to `true` if you want to use it. 49 | 50 | - `fluent`: 51 | - `"enabled": true|false` _(default: `false`)_ - If enabled, logs will be directed to the Fluent collector. 52 | - `"host": "fluentd.yourco.com"` _(default: `localhost`)_ - FluentD collector host 53 | - `"port": nnn` _(default: `24224`)_ - FluentD collector port 54 | - `"reconnect_interval": nnn` _(default: `600`)_ - If the collector can't be reached, cctail will try to reconnect again in `nnn` seconds. 55 | - `"timeout": nnn` _(default: `3`)_ - Timeout to connect to FluentD collector 56 | - `"tag_prefix": "your_tag_prefix"` _(default: `sfcc`)_ - All logs sent to FluentD will have this prefix, followed by the log type (i.e. "sfcc.customerror"). 
57 | 58 | ### Sample configuration files 59 | 60 | Sample dw.json: 61 | ```json 62 | { 63 | "hostname": "dev01-mysandbox.demandware.net", 64 | "client_id": "a12464ae-b484-4b90-4dfe-17e20844e9f0", 65 | "client_secret": "mysupersecretpassword" 66 | } 67 | ``` 68 | 69 | Sample log.conf.json: 70 | ```json 71 | { 72 | "profiles": { 73 | "dev01-api-client-example": { 74 | "hostname": "dev01-mysandbox.demandware.net", 75 | "client_id": "a12345ae-b678-9b01-2dfe-34e56789e0f1", 76 | "client_secret": "mysupersecretsecret", 77 | "polling_interval": 30, 78 | "refresh_loglist_interval": 900 79 | }, 80 | "dev02-bm-example": { 81 | "hostname": "dev02-mysandbox.demandware.net", 82 | "username": "user@yourco.com", 83 | "password": "mysupersecretpassword", 84 | "log_types": [ "customerror", "customwarn", "error", "jobs", "warn" ], 85 | "polling_interval": 60 86 | } 87 | }, 88 | "interactive": false, 89 | "fluent": { 90 | "enabled": true 91 | } 92 | } 93 | ``` 94 | 95 | If multiple instances are configured, you may directly pass the name of the instance for skipping the interactive selection prompt, e.g.: 96 | ```bash 97 | $ cctail dev02 98 | ``` 99 | 100 | ### API client configuration 101 | 102 | The API client id must be created in the account.demandware.com console. Before being able to use `cctail` you must grant the required permissions to that client id for accessing the logs folder through WebDAV in any target SFCC instance. 103 | 104 | To do so, access Business Manager and add the following to Administration -> Organization -> WebDAV Client Permissions, replacing the client_id value with your client id. **Note:** you may need to merge these settings with existing ones. 
105 | 106 | ```json 107 | { 108 | "clients": [ 109 | { 110 | "client_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", 111 | "permissions": [ 112 | { 113 | "path": "/logs", 114 | "operations": ["read_write"] 115 | } 116 | ] 117 | } 118 | ] 119 | } 120 | ``` 121 | 122 | ## Usage 123 | 124 | ```bash 125 | $ cctail 126 | ``` 127 | 128 | Run `cctail` in a folder containing either a log.conf-json or dw.json config file. 129 | The tool will display the list of available logs in order to let you interactively select the ones you want to monitor. 130 | 131 | ## License 132 | 133 | Released under the MIT license. 134 | -------------------------------------------------------------------------------- /src/lib/logfetcher.ts: -------------------------------------------------------------------------------- 1 | import Axios, { AxiosRequestConfig, AxiosResponse, Method } from 'axios'; 2 | import { cyan } from 'colorette'; 3 | import fs from 'fs'; 4 | import moment from 'moment'; 5 | import path from 'path'; 6 | import logger from './logger'; 7 | import { DwJson, LogFile } from './types'; 8 | 9 | const { log } = console; 10 | 11 | const ua = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36"; 12 | const timeoutMs = 3000; 13 | const initialBytesRead = 20000; 14 | 15 | // Yup, we're single threaded. Thanks SFCC API! 16 | const requestsMaxCount = 1; 17 | const requestIntervalMs = 10; 18 | let requestsPending = 0; 19 | 20 | const axios = Axios.create(); 21 | 22 | // Thank you @matthewsuan! 
https://gist.github.com/matthewsuan/2bdc9e7f459d5b073d58d1ebc0613169 23 | // Axios Request Interceptor 24 | axios.interceptors.request.use(function (config) { 25 | return new Promise((resolve, reject) => { 26 | let interval = setInterval(() => { 27 | if (requestsPending < requestsMaxCount) { 28 | requestsPending++; 29 | clearInterval(interval); 30 | resolve(config); 31 | } 32 | }, requestIntervalMs); 33 | }) 34 | }) 35 | 36 | // Axios Response Interceptor 37 | axios.interceptors.response.use(function (response) { 38 | requestsPending = Math.max(0, requestsPending - 1); 39 | return Promise.resolve(response); 40 | }, function (error) { 41 | requestsPending = Math.max(0, requestsPending - 1); 42 | return Promise.reject(error); 43 | }) 44 | 45 | const logfetcher = { 46 | 47 | errorcount: 0, 48 | errorlimit: 5, 49 | 50 | makeRequest: async function (profile: DwJson, methodStr: string, url_suffix: string, headers: Map, debug?: boolean): Promise { 51 | if (!this.isUsingBM(profile) && !this.isUsingAPI(profile)) { 52 | this.logMissingAuthCredentials(); 53 | process.exit(1); 54 | } 55 | 56 | let url = `https://${profile.hostname}/on/demandware.servlet/webdav/Sites/Logs`; 57 | let method: Method = (methodStr as Method); 58 | if (url_suffix && url_suffix.length > 0) { 59 | url += '/' + url_suffix; 60 | } 61 | 62 | let opts: AxiosRequestConfig = { 63 | method: method, 64 | timeout: timeoutMs, 65 | url: url, 66 | headers: {} 67 | } 68 | 69 | if (this.isUsingBM(profile)) { 70 | opts.headers.Authorization = 'Basic ' + Buffer.from(profile.username + ':' + profile.password).toString('base64'); 71 | } else { 72 | await this.authorize(profile, debug); 73 | opts.headers.Authorization = profile.token; 74 | } 75 | 76 | if (headers && headers.size > 0) { 77 | for (let [key, value] of headers) { 78 | opts.headers[key] = value; 79 | } 80 | } 81 | 82 | // logger.log(logger.debug, `Request: ${JSON.stringify(opts)}`, debug); 83 | return axios.request(opts); 84 | }, 85 | 86 | authorize: async 
function (profile: DwJson, debug?: boolean): Promise { 87 | if (!this.isUsingAPI(profile)) { 88 | this.logMissingAuthCredentials(); 89 | process.exit(1); 90 | } 91 | 92 | if (!profile.token || !profile.token_expiry || moment.utc().isSameOrAfter(profile.token_expiry)) { 93 | logger.log(logger.debug, `Client API token expired or not set, resetting Client API token.`); 94 | } else { 95 | return; 96 | } 97 | 98 | let opts: AxiosRequestConfig = { 99 | url: 'https://account.demandware.com/dw/oauth2/access_token?grant_type=client_credentials', 100 | method: 'POST', 101 | headers: { 102 | 'Content-Type': 'application/x-www-form-urlencoded' 103 | }, 104 | auth: { 105 | username: profile.client_id, 106 | password: profile.client_secret 107 | } 108 | } 109 | // logger.log(logger.debug, `Request: ${JSON.stringify(opts)}`, debug); 110 | 111 | try { 112 | logger.log(logger.debug, `Authenticating to client API using client id ${profile.client_id}`, debug); 113 | const response = await axios.request(opts); 114 | profile.token = response.data.token_type.trim() + ' ' + response.data.access_token.trim(); 115 | profile.token_expiry = moment.utc().add(response.data.expires_in, 's').subtract(profile.polling_interval, 's'); 116 | logger.log(logger.debug, `Authenticated, token expires at ${profile.token_expiry.toString()}`, debug); 117 | } catch (err) { 118 | logger.log(logger.error, `Error authenticating client id ${profile.client_id} - please check your credentials.\n${err}.`); 119 | process.exit(1); 120 | } 121 | }, 122 | 123 | fetchLogList: async function (profile: DwJson, debug?: boolean, logpath = ''): Promise { 124 | try { 125 | if (!logpath || logpath.length === 0) { 126 | logger.log(logger.debug, `Fetching log list from ${profile.hostname}`, debug); 127 | } else { 128 | logger.log(logger.debug, `Fetching log list from ${profile.hostname}, subdirectory ${logpath}`, debug); 129 | } 130 | let headers = new Map([["User-Agent", ua]]); 131 | let res = await this.makeRequest(profile, 
'GET', logpath, headers, debug);
      return res.data;
    } catch (err) {
      logger.log(logger.error, 'Fetching log list failed with error: ' + err.message);
      // Fix: axios surfaces the HTTP status on err.response.status, not err.status
      // (fetchLogContent below already reads err.response.status). The previous
      // `switch (err.status)` never matched, so the 401/500 guidance below was
      // unreachable and every failure silently fell through to `return ''`.
      switch (err.response ? err.response.status : undefined) {
        case 401:
          logger.log(logger.error, 'Authentication successful but access to logs folder has been denied.');
          logger.log(logger.error, 'Please add required webdav permissions in BM -> Administration -> Organization -> WebDAV Client Permissions.');
          logger.log(logger.error, 'Sample permissions:');
          logger.log(logger.error, fs.readFileSync(path.join(__dirname, '../webdav-permissions-sample.json'), 'utf8'));
          log('\n');
          logger.log(logger.error, 'Exiting cctail.');
          process.exit(1);
        case 500:
          logger.log(logger.error, 'Authentication successful but attempt to retrieve WebDAV logs failed.');
          logger.log(logger.error, 'Please ensure your WebDAV permissions are syntactically correct and have no duplicate entries.');
          logger.log(logger.error, 'Check in BM -> Administration -> Organization -> WebDAV Client Permissions.');
          logger.log(logger.error, 'Sample permissions:');
          logger.log(logger.error, fs.readFileSync(path.join(__dirname, '../webdav-permissions-sample.json'), 'utf8'));
          log('\n');
          logger.log(logger.error, 'Exiting cctail.');
          process.exit(1);
        default:
          // Network errors (no response) and other statuses: report nothing more,
          // return an empty listing so the caller can retry on the next poll.
          return '';
      }
    }
  },

  /**
   * HEAD-requests a single log file and returns its size in bytes.
   * Returns 0 when the request fails or no content-length header is present.
   */
  fetchFileSize: async function (profile: DwJson, logobj: LogFile): Promise<number> {
    let size = 0;
    try {
      logger.log(logger.debug, cyan(`Fetching size for ${logobj.log}`), logobj.debug);
      let res = await this.makeRequest(profile, 'HEAD', logobj.log, null, logobj.debug);
      if (res.headers['content-length']) {
        size = parseInt(res.headers['content-length'], 10);
        logger.log(logger.debug, `Fetched size for ${logobj.log}: size ${size}`, logobj.debug);
      } else {
        logger.log(logger.debug, `No content-length returned for ${logobj.log}`, logobj.debug);
169 | } 170 | } catch (err) { 171 | logger.log(logger.error, `Fetching file size of ${logobj.log} failed with error: ${err.message}`); 172 | } 173 | return size; 174 | }, 175 | 176 | fetchLogContent: async function (profile: DwJson, logobj: LogFile): Promise<[LogFile, string]> { 177 | try { 178 | // If logobj.size is negative, leave as-is but range starts at 0. (Log rollover case) 179 | let range = 0; 180 | if (logobj.log.endsWith("log")) { 181 | if (!logobj.size) { 182 | let size = await this.fetchFileSize(profile, logobj); 183 | range = logobj.size = Math.max(size - initialBytesRead, 0); 184 | } else if (logobj.size > 0) { 185 | range = logobj.size; 186 | } 187 | } else { 188 | logobj.size = -1; 189 | } 190 | 191 | let headers = new Map([["Range", `bytes=${range}-`]]); 192 | let res = await this.makeRequest(profile, 'GET', logobj.log, headers, logobj.debug); 193 | logger.log(logger.debug, `Fetching contents from ${logobj.log} retured status code ${res.status}`, logobj.debug); 194 | if (res.status === 206) { 195 | if (logobj.size < 0) { 196 | logobj.size = res.data.length; 197 | return [logobj, res.data]; 198 | } 199 | if (logobj.size === 0 && res.data.length > initialBytesRead) { 200 | logobj.size = res.data.length; 201 | return [logobj, res.data.substring(res.data.length - initialBytesRead)]; 202 | } 203 | logobj.size += res.data.length; 204 | return [logobj, res.data]; 205 | } 206 | } catch (err) { 207 | if (err.response) { 208 | logger.log(logger.debug, `Fetching contents from ${logobj.log} returned status code ${err.response.status}`, logobj.debug); 209 | } 210 | if (!err.response || err.response.status !== 416) { 211 | this.errorcount = this.errorcount + 1; 212 | if (this.errorcount > 1) { 213 | logger.log(logger.error, `Error fetching contents from ${logobj.log}: ${err.message} (error count ${this.errorcount})`); 214 | } else { 215 | // don't be too verbose, just retry if this was the first error 216 | logger.log(logger.debug, `Error fetching contents from 
${logobj.log}: ${err.message} (error count ${this.errorcount})`);
        }
      }
    }
    return [logobj, ''];
  },

  /**
   * Prints guidance (and a sample config) when neither client_id/client_secret
   * nor username/password credentials are configured for the profile.
   */
  logMissingAuthCredentials: function () {
    logger.log(logger.error, ('Missing authentication credentials. Please add client_id/client_secret or username/password to log.conf.json or dw.json.'));
    logger.log(logger.error, (`Sample config:\n`));
    // Fix: the sample file shipped at the repo root is named
    // "log.conf-sample.json"; the old path "log.config-sample.json" does not
    // exist, so readFileSync threw ENOENT exactly when the user needed help.
    // NOTE(review): resolution is relative to the compiled file's __dirname —
    // confirm the sample files are copied alongside the dist output.
    logger.log(logger.error, (fs.readFileSync(path.join(__dirname, '../log.conf-sample.json'), 'utf8')));
    log('\n');
  },

  // True when the profile authenticates via OAuth client credentials.
  isUsingAPI: function (profile: DwJson) {
    return (profile.client_id && profile.client_secret)
  },

  // True when the profile authenticates via Business Manager basic auth.
  isUsingBM: function (profile: DwJson) {
    return (profile.username && profile.password)
  }
}

export default logfetcher;
--------------------------------------------------------------------------------
/src/cctail.ts:
--------------------------------------------------------------------------------
#!/usr/bin/env node

import { green, yellow } from 'colorette';
import fs from 'fs';
import moment from 'moment';
import path from 'path';
import prompts, { Choice } from 'prompts';
import s from 'underscore.string';
import yargs from 'yargs';

import logemitter from './lib/logemitter';
import logfetcher from './lib/logfetcher';
import LogFluent from './lib/logfluent';
import logger from './lib/logger';
import logparser from './lib/logparser';
import { DwJson, FluentConfig, LogConfig, LogFile, Profiles } from './lib/types';

let fluent: LogFluent;
let logConfig: LogConfig;
let profiles: Profiles;
let profile: DwJson;
let debug = false;
let interactive = true;
let pollingSeconds = 3;
let refreshLogListSeconds = 600;
let nextLogRefresh: moment.Moment;
let latestCodeprofilerLogSent: LogFile;
let envVarPrefix = "ENV_";

let run = async function () {
  let
packageJson = JSON.parse(fs.readFileSync(path.join(__dirname, '../package.json'), 'utf8')); 32 | logger.log(logger.info, `cctail - v${packageJson.version}`); 33 | 34 | readLogConf(); 35 | 36 | if (!profiles || Object.keys(profiles).length === 0) { 37 | logger.log(logger.warn, `No profiles in log.conf.json, checking for dw.json in path ${process.cwd()}\n`); 38 | readDwJson(); 39 | } 40 | 41 | yargs.parserConfiguration({ 42 | "parse-numbers": false 43 | }); 44 | 45 | const args: any = yargs.argv 46 | 47 | if (args['d']) { 48 | debug = true; 49 | } 50 | 51 | let fileobjs: LogFile[] = []; 52 | if (interactive) { 53 | fileobjs = await interact(args._[0] as string); 54 | } else { 55 | fileobjs = await dontInteract(args._[0] as string); 56 | } 57 | 58 | if (fileobjs.length === 0) { 59 | logger.log(logger.error, 'ERROR: No logs selected or returned, exiting.'); 60 | process.exit(-1); 61 | } 62 | 63 | setImmediate(pollLogs, fileobjs); 64 | } 65 | 66 | let dontInteract = async function (profilename?: string): Promise { 67 | if (!profile) { 68 | if (Object.keys(profiles).length === 1) { 69 | profile = profiles[Object.keys(profiles)[0]]; 70 | } else if (!profilename) { 71 | logger.log(logger.error, 'ERROR: No profile selected, exiting.'); 72 | process.exit(-1); 73 | } else if (!profiles[`${profilename}`]) { 74 | logger.log(logger.error, `ERROR: Specified profile ${profilename} not found.`); 75 | process.exit(-1); 76 | } else { 77 | profile = profiles[profilename]; 78 | logger.log(logger.info, `Using profile ${profilename}.`); 79 | } 80 | 81 | setPollingInterval(profile); 82 | if (profile.refresh_loglist_interval) { 83 | refreshLogListSeconds = profile.refresh_loglist_interval; 84 | logger.log(logger.info, `Setting log list refresh interval (seconds): ${refreshLogListSeconds}`); 85 | } else { 86 | profile.refresh_loglist_interval = refreshLogListSeconds; 87 | logger.log(logger.info, `Using default log list refresh interval (seconds): ${refreshLogListSeconds}`); 88 | } 89 | } 90 
| 91 | nextLogRefresh = moment().add(refreshLogListSeconds, 's'); 92 | let fileobjs = await getThatLogList(profile); 93 | let logx: LogFile[] = []; 94 | if (profile.log_types && profile.log_types.length > 0) { 95 | for (let thisfile of fileobjs) { 96 | let logname = thisfile.log.substr(0, thisfile.log.indexOf('-')); 97 | if (profile.log_types.indexOf(logname) != -1) { 98 | logx.push(thisfile); 99 | } 100 | } 101 | } else { 102 | logx = fileobjs; 103 | } 104 | 105 | if (!profile.log_types || profile.log_types.indexOf('codeprofiler') > 0) { 106 | let cpfileobjs = await getThatLogList(profile, '.csv'); 107 | if (cpfileobjs && cpfileobjs.length > 0) { 108 | let newestcpfile = cpfileobjs.reduce((newest, compare) => newest.date.isAfter(compare.date) ? newest : compare); 109 | if (!latestCodeprofilerLogSent || newestcpfile.date.isAfter(latestCodeprofilerLogSent.date)) { 110 | logx.push(newestcpfile); 111 | latestCodeprofilerLogSent = newestcpfile; 112 | } 113 | } 114 | } 115 | 116 | return logx; 117 | } 118 | 119 | let interact = async function (profilename?: string): Promise { 120 | if (!profile) { 121 | if (Object.keys(profiles).length === 1) { 122 | profile = profiles[Object.keys(profiles)[0]]; 123 | } else { 124 | if (profilename === undefined) { 125 | const profileselection = await prompts({ 126 | type: 'select', 127 | name: 'value', 128 | message: 'Select a profile:', 129 | choices: Object.keys(profiles).map(i => ({ 130 | title: ` [${i}] ${profiles[i].hostname}`, 131 | value: `${i}` 132 | })) 133 | }); 134 | profilename = profileselection.value; 135 | } 136 | 137 | if (!profilename) { 138 | logger.log(logger.error, 'ERROR: No profile selected, exiting.'); 139 | process.exit(-1); 140 | } 141 | 142 | if (!profiles[`${profilename}`]) { 143 | logger.log(logger.error, `ERROR: Specified profile ${profilename} not found.`); 144 | process.exit(-1); 145 | } 146 | 147 | profile = profiles[profilename]; 148 | } 149 | setPollingInterval(profile); 150 | } 151 | 152 | let 
fileobjs = await getThatLogList(profile); 153 | fileobjs.sort((a, b) => b.date.unix() - a.date.unix()); 154 | 155 | let logx: LogFile[] = []; 156 | let logchoiche: Choice[] = []; 157 | 158 | for (let i in fileobjs) { 159 | let sizeformatted = s.lpad(fileobjs[i].size_string, 12); 160 | if (sizeformatted.trim() !== '0.0 kb') { 161 | sizeformatted = yellow(sizeformatted); 162 | } 163 | let dateformatted = s.lpad(fileobjs[i].date.format('YYYY-MM-DD HH:mm:ss'), 20); 164 | if (fileobjs[i].date.isSame(moment.utc(), 'hour')) { 165 | dateformatted = yellow(dateformatted); 166 | } 167 | let logname = s.rpad(fileobjs[i].log, 70); 168 | 169 | logname = logger.colorize(logname, logname); 170 | 171 | logchoiche.push({ 172 | title: `${green(s.lpad(i, 2))} ${logname} ${sizeformatted} ${dateformatted}`, 173 | value: i 174 | }); 175 | } 176 | 177 | let logselection = await prompts({ 178 | type: 'autocompleteMultiselect', 179 | name: 'value', 180 | message: `Select logs on [${green(profile.hostname)}]`, 181 | choices: logchoiche, 182 | // eslint-disable-next-line no-return-assign 183 | onState: ((statedata) => { statedata.value ? 
statedata.value.forEach((i: Choice) => i.title = `\n${i.title}`) : 'no selection' }) 184 | }); 185 | 186 | if (logselection.value) { // ctrl+c 187 | logselection.value.forEach((i: number) => { 188 | logx.push(fileobjs[i]); 189 | }); 190 | } 191 | 192 | return logx; 193 | } 194 | 195 | let setPollingInterval = function (profile: DwJson) { 196 | if (profile.polling_interval) { 197 | pollingSeconds = profile.polling_interval; 198 | logger.log(logger.info, `Setting polling interval (seconds): ${pollingSeconds}`); 199 | } else { 200 | logger.log(logger.info, `Using default polling interval (seconds): ${pollingSeconds}`); 201 | profile.polling_interval = pollingSeconds; 202 | } 203 | } 204 | 205 | 206 | let getThatLogList = async function (profile: DwJson, filesuffix = ".log"): Promise { 207 | let fileobjs: LogFile[] = []; 208 | 209 | let data = ''; 210 | 211 | if (filesuffix === ".csv") { 212 | data = await logfetcher.fetchLogList(profile, debug, 'codeprofiler'); 213 | } else { 214 | data = await logfetcher.fetchLogList(profile, debug); 215 | } 216 | 217 | let regexp = new RegExp(`[\\s\\S\\&\\?]*?(?:)?(.*?)(?:<\\/tt>)?[\\s\\S\\&\\?]*?(.*?)`, 'gim'); 218 | let match = regexp.exec(data); 219 | 220 | while (match != null) { 221 | let filedate = moment.utc(match[3]); 222 | if (match[1].substr(-4) === filesuffix && filedate.isSame(moment.utc(), 'day')) { 223 | fileobjs.push({ 224 | log: match[1], 225 | size_string: match[2], 226 | date: moment.utc(match[3]), 227 | debug: debug 228 | }); 229 | logger.log(logger.debug, `Available Log: ${match[1]}`, debug); 230 | } 231 | match = regexp.exec(data); 232 | } 233 | 234 | return fileobjs; 235 | } 236 | 237 | let pollLogs = async function (fileobjs: LogFile[], doRollover = false) { 238 | if (logfetcher.isUsingAPI(profile) && logfetcher.errorcount > logfetcher.errorlimit) { 239 | logger.log(logger.error, `Error count (${logfetcher.errorcount}) exceeded limit of ${logfetcher.errorlimit}, resetting Client API token.`); 240 | 
logfetcher.errorcount = 0; 241 | profile.token = null; 242 | await logfetcher.authorize(profile, debug); 243 | } 244 | 245 | if (!doRollover) { 246 | if (moment.utc().isAfter(fileobjs[0].date, 'day')) { 247 | logger.log(logger.info, 'Logs have rolled over, collecting last entries from old logs.'); 248 | doRollover = true; 249 | } else { 250 | logger.log(logger.debug, 'Logs have not rolled over since last poll cycle.', debug); 251 | if (nextLogRefresh && moment().isSameOrAfter(nextLogRefresh)) { 252 | logger.log(logger.debug, 'Refreshing log list.', debug); 253 | let newfiles = await dontInteract(); 254 | for (let newfile of newfiles) { 255 | if (!fileobjs.some(logfile => logfile.log === newfile.log)) { 256 | logger.log(logger.debug, `Added new log file: ${newfile.log}.`, debug); 257 | fileobjs.push(newfile); 258 | } 259 | } 260 | } 261 | } 262 | 263 | if (fluent) { 264 | fluent.output(profile.hostname, 265 | await logparser.process(fileobjs.map((logobj) => logfetcher.fetchLogContent(profile, logobj))), 266 | false, fileobjs[0].debug); 267 | } else { 268 | let parsed = logemitter.sort( 269 | await logparser.process(fileobjs.map((logobj) => logfetcher.fetchLogContent(profile, logobj))) 270 | ); 271 | logemitter.output(parsed, false, fileobjs[0].debug); 272 | } 273 | 274 | // Codeprofiler files should only be consumed once 275 | let cp = fileobjs.findIndex(logobj => logobj.log.endsWith("csv")); 276 | if (cp > -1) { 277 | logger.log(logger.debug, `Removed codeprofiler log ${fileobjs[cp].log} from list.`, debug); 278 | fileobjs.splice(cp, 1); 279 | } 280 | 281 | } else { 282 | if (interactive) { 283 | fileobjs = await interact(); 284 | } else { 285 | fileobjs = await dontInteract(); 286 | } 287 | 288 | if (fileobjs.length != 0) { 289 | doRollover = false; 290 | for (let i of fileobjs) { 291 | i.size = -1; 292 | } 293 | } else { 294 | logger.log(logger.warn, 'No logs to report yet, waiting until next cycle.'); 295 | } 296 | } 297 | 298 | setTimeout(pollLogs, 
pollingSeconds * 1000, fileobjs, doRollover); 299 | } 300 | 301 | function replaceEnvPlaceholders(data: any) { 302 | Object.keys(data).forEach(function (key) { 303 | var value = data[key]; 304 | if (typeof (value) === 'object') { 305 | replaceEnvPlaceholders(value); 306 | } else if (typeof (value) === 'string' && value.startsWith(envVarPrefix)) { 307 | var checkForVar = value.replace(envVarPrefix, ""); 308 | if (process.env.hasOwnProperty(checkForVar)) { 309 | data[key] = process.env[checkForVar]; 310 | } 311 | } 312 | }); 313 | return data; 314 | } 315 | 316 | function readDwJson() { 317 | let dwJsonPath = path.join(process.cwd(), 'dw.json'); 318 | logger.log(logger.info, `Loading profile from ${dwJsonPath}\n`); 319 | try { 320 | const dwJson = replaceEnvPlaceholders(JSON.parse(fs.readFileSync(dwJsonPath, 'utf8'))); 321 | const name = dwJson.profile || dwJson.hostname.split('-')[0].split('-')[0]; 322 | profiles[name] = dwJson; 323 | } 324 | catch (err) { 325 | logger.log(logger.error, `No dw.json found in path ${process.cwd()}\n`); 326 | process.exit(-1); 327 | } 328 | } 329 | 330 | function readLogConf() { 331 | try { 332 | logConfig = replaceEnvPlaceholders(JSON.parse(fs.readFileSync(`${process.cwd()}/log.conf.json`, 'utf8'))); 333 | profiles = logConfig.profiles ?? 
logConfig as any; // support for old configs (without "profiles" group) 334 | if (logConfig.interactive !== undefined && logConfig.interactive === false) { 335 | interactive = false; 336 | logger.log(logger.info, "Interactive mode is disabled."); 337 | } 338 | if (logConfig.fluent && logConfig.fluent.enabled) { 339 | let fluentConfig: FluentConfig = logConfig.fluent; 340 | fluent = new LogFluent(fluentConfig); 341 | logger.log(logger.info, "FluentD output is enabled."); 342 | } else { 343 | logger.log(logger.info, "Console output is enabled."); 344 | } 345 | } catch (err) { 346 | logger.log(logger.error, `\nMissing or invalid log.conf.json.\nError message: ${err}\n`); 347 | process.exit(-1); 348 | } 349 | } 350 | 351 | run(); 352 | --------------------------------------------------------------------------------