From aeed9d6c92be4a4b04472c3635a7a168830c1750 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 12 Oct 2017 08:31:54 +1100 Subject: [PATCH 01/34] fix(tcp): moved serialisation code to logevent --- lib/log4js.js | 50 ++------------------------ lib/logger.js | 97 ++++++++++++++++++++++++++++++++++++++------------- 2 files changed, 74 insertions(+), 73 deletions(-) diff --git a/lib/log4js.js b/lib/log4js.js index 33a2be2..89b6f50 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -87,52 +87,6 @@ function setLevelForCategory(category, level) { config.categories.set(category, categoryConfig); } -function serialise(logEvent) { - // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. - // The following allows us to serialize errors correctly. - // Validate that we really are in this case - try { - const logData = logEvent.data.map((e) => { - if (e && e.stack && JSON.stringify(e) === '{}') { - e = { message: e.message, stack: e.stack }; - } - return e; - }); - logEvent.data = logData; - return JSON.stringify(logEvent); - } catch (e) { - return serialise(new LoggingEvent( - 'log4js', - config.levels.ERROR, - ['Unable to serialise log event due to :', e] - )); - } -} - -function deserialise(serialised) { - let event; - try { - event = JSON.parse(serialised); - event.startTime = new Date(event.startTime); - event.level = config.levels.getLevel(event.level.levelStr); - event.data = event.data.map((e) => { - if (e && e.stack) { - const fakeError = new Error(e.message); - fakeError.stack = e.stack; - e = fakeError; - } - return e; - }); - } catch (e) { - event = new LoggingEvent( - 'log4js', - config.levels.ERROR, - ['Unable to parse log:', serialised, 'because: ', e] - ); - } - - return event; -} function sendLogEventToAppender(logEvent) { if (!enabled) return; @@ -145,7 +99,7 @@ function sendLogEventToAppender(logEvent) { function workerDispatch(logEvent) { debug(`sending message to master from worker ${process.pid}`); - process.send({ topic: 'log4js:message', data: serialise(logEvent) }); + process.send({ topic: 'log4js:message', data: logEvent.serialise() }); } function isPM2Master() { @@ -187,7 +141,7 @@ const receiver = (worker, message) => { } if (message && message.topic && message.topic === 'log4js:message') { debug('received message: ', message.data); - sendLogEventToAppender(deserialise(message.data)); + sendLogEventToAppender(LoggingEvent.deserialise(message.data)); } }; diff --git a/lib/logger.js b/lib/logger.js index 62ee280..8131e15 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -11,36 +11,83 @@ try { debug('Clustering support disabled because require(cluster) threw an error: ', e); } -/** - * @name LoggingEvent - * @namespace Log4js - */ -class LoggingEvent { +module.exports = function (levels, getLevelForCategory, setLevelForCategory) { /** - * Models a logging event. - * @constructor - * @param {String} categoryName name of category - * @param {Log4js.Level} level level of message - * @param {Array} data objects to log - * @author Seth Chisamore + * @name LoggingEvent + * @namespace Log4js */ - constructor(categoryName, level, data, context) { - this.startTime = new Date(); - this.categoryName = categoryName; - this.data = data; - this.level = level; - this.context = Object.assign({}, context); - this.pid = process.pid; - if (cluster && cluster.isWorker) { - this.cluster = { - workerId: cluster.worker.id, - worker: process.pid - }; + class LoggingEvent { + /** + * Models a logging event. 
+ * @constructor + * @param {String} categoryName name of category + * @param {Log4js.Level} level level of message + * @param {Array} data objects to log + * @author Seth Chisamore + */ + constructor(categoryName, level, data, context) { + this.startTime = new Date(); + this.categoryName = categoryName; + this.data = data; + this.level = level; + this.context = Object.assign({}, context); + this.pid = process.pid; + if (cluster && cluster.isWorker) { + this.cluster = { + workerId: cluster.worker.id, + worker: process.pid + }; + } + } + + serialise() { + // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. + // The following allows us to serialize errors correctly. + // Validate that we really are in this case + try { + const logData = this.data.map((e) => { + if (e && e.stack && JSON.stringify(e) === '{}') { + e = { message: e.message, stack: e.stack }; + } + return e; + }); + this.data = logData; + return JSON.stringify(this); + } catch (e) { + return new LoggingEvent( + 'log4js', + levels.ERROR, + ['Unable to serialise log event due to :', e] + ).serialise(); + } + } + + static deserialise(serialised) { + let event; + try { + event = JSON.parse(serialised); + event.startTime = new Date(event.startTime); + event.level = levels.getLevel(event.level.levelStr); + event.data = event.data.map((e) => { + if (e && e.stack) { + const fakeError = new Error(e.message); + fakeError.stack = e.stack; + e = fakeError; + } + return e; + }); + } catch (e) { + event = new LoggingEvent( + 'log4js', + levels.ERROR, + ['Unable to parse log:', serialised, 'because: ', e] + ); + } + + return event; } } -} -module.exports = function (levels, getLevelForCategory, setLevelForCategory) { /** * Logger to log messages. * use {@see log4js#getLogger(String)} to get an instance. 
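[Editor's sketch - not part of the patch series.] With serialise()/deserialise() living on LoggingEvent, both the cluster worker dispatch above and the TCP appender introduced in the next patch can round-trip events over the wire, including Errors that plain JSON.stringify would otherwise collapse to {}. A minimal round-trip, assuming the module shapes the tests later in this series rely on (lib/levels exporting a factory, lib/logger returning { Logger, LoggingEvent }; paths relative to the repository root):

    const levels = require('./lib/levels')();                        // default levels, no custom additions
    const LoggingEvent = require('./lib/logger')(levels).LoggingEvent;

    const event = new LoggingEvent('demo', levels.ERROR, [new Error('boom')], {});
    const wire = event.serialise();                // the Error is flattened to { message, stack } before stringify
    const copy = LoggingEvent.deserialise(wire);   // plain object with startTime, level and data restored

    console.log(copy.level === levels.ERROR);      // true - level looked up again via levels.getLevel(levelStr)
    console.log(copy.data[0] instanceof Error);    // true - a substitute Error carrying the original stack

Note that the catch branches in both methods mean a failure to serialise or to parse degrades into a log4js ERROR event rather than throwing back into the caller.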
From e05d50ef449a231098994c0d5afb680e2c217c86 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Tue, 28 Nov 2017 08:44:06 +1100 Subject: [PATCH 02/34] feat(tcp): initial version of tcp client appender --- lib/appenders/tcp.js | 76 +++++++++++++++++++++++++++++++++++ test/tap/server-test.js | 6 +++ test/tap/tcp-appender-test.js | 45 +++++++++++++++++++++ 3 files changed, 127 insertions(+) create mode 100644 lib/appenders/tcp.js create mode 100644 test/tap/server-test.js create mode 100644 test/tap/tcp-appender-test.js diff --git a/lib/appenders/tcp.js b/lib/appenders/tcp.js new file mode 100644 index 0000000..968f075 --- /dev/null +++ b/lib/appenders/tcp.js @@ -0,0 +1,76 @@ +'use strict'; + +const debug = require('debug')('log4js:tcp'); +const net = require('net'); + +function appender(config) { + let canWrite = false; + const buffer = []; + let socket; + let shutdownAttempts = 3; + + function write(loggingEvent) { + debug('Writing log event to socket'); + canWrite = socket.write(loggingEvent.serialise(), 'utf8'); + } + + function emptyBuffer() { + let evt; + debug('emptying buffer'); + /* eslint no-cond-assign:0 */ + while ((evt = buffer.shift())) { + write(evt); + } + } + + function createSocket() { + debug(`appender creating socket to ${config.host || 'localhost'}:${config.port || 5000}`); + socket = net.createConnection(config.port || 5000, config.host || 'localhost'); + socket.on('connect', () => { + debug('socket connected'); + emptyBuffer(); + canWrite = true; + }); + socket.on('drain', () => { + debug('drain event received, emptying buffer'); + canWrite = true; + emptyBuffer(); + }); + socket.on('timeout', socket.end.bind(socket)); + // don't bother listening for 'error', 'close' gets called after that anyway + socket.on('close', createSocket); + } + + createSocket(); + + function log(loggingEvent) { + if (canWrite) { + write(loggingEvent); + } else { + debug('buffering log event because it cannot write at the moment'); + buffer.push(loggingEvent); + } + } + + log.shutdown = function (cb) { + debug('shutdown called'); + if (buffer.length && shutdownAttempts) { + debug('buffer has items, waiting 100ms to empty'); + shutdownAttempts -= 1; + setTimeout(() => { + log.shutdown(cb); + }, 100); + } else { + socket.removeAllListeners('close'); + socket.end(cb); + } + }; + return log; +} + +function configure(config) { + debug(`configure with config = ${config}`); + return appender(config); +} + +module.exports.configure = configure; diff --git a/test/tap/server-test.js b/test/tap/server-test.js new file mode 100644 index 0000000..a713e47 --- /dev/null +++ b/test/tap/server-test.js @@ -0,0 +1,6 @@ +const test = require('tap').test; + +test('TCP Server', (batch) => { + batch.test('should listen for TCP messages and re-send via process.send'); + batch.end(); +}); diff --git a/test/tap/tcp-appender-test.js b/test/tap/tcp-appender-test.js new file mode 100644 index 0000000..20d4316 --- /dev/null +++ b/test/tap/tcp-appender-test.js @@ -0,0 +1,45 @@ +const test = require('tap').test; +const net = require('net'); +const log4js = require('../../lib/log4js'); + +const messages = []; +const server = net.createServer((socket) => { + socket.setEncoding('utf8'); + socket.on('data', (data) => { + messages.push(JSON.parse(data)); + }); +}); + +server.unref(); + +server.listen(() => { + const port = server.address().port; + log4js.configure({ + appenders: { + tcp: { type: 'tcp', port: port } + }, + categories: { + default: { appenders: ['tcp'], level: 'debug' } + } + }); + + const logger = 
log4js.getLogger(); + logger.info('This should be sent via TCP.'); + log4js.shutdown(() => { + server.close(() => { + test('TCP Appender', (batch) => { + batch.test('should send log messages as JSON over TCP', (t) => { + t.equal(messages.length, 1); + t.match(messages[0], { + data: ['This should be sent via TCP.'], + categoryName: 'default', + context: {}, + level: { levelStr: 'INFO' } + }); + t.end(); + }); + batch.end(); + }); + }); + }); +}); From 319562914b9a759dc02e98efa3154684ed81bacb Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Tue, 12 Dec 2017 08:03:57 +1100 Subject: [PATCH 03/34] fix(tcp): interim commit while I work on something else --- lib/LoggingEvent.js | 79 ++++++++++++++++++++++++++++++++++++++ lib/clustering.js | 81 +++++++++++++++++++++++++++++++++++++++ lib/configuration.js | 32 ++++------------ lib/log4js.js | 49 ++---------------------- lib/logger.js | 84 +---------------------------------------- lib/server.js | 24 ++++++++++++ test/tap/server-test.js | 40 +++++++++++++++++++- 7 files changed, 236 insertions(+), 153 deletions(-) create mode 100644 lib/LoggingEvent.js create mode 100644 lib/clustering.js create mode 100644 lib/server.js diff --git a/lib/LoggingEvent.js b/lib/LoggingEvent.js new file mode 100644 index 0000000..cb7cbd1 --- /dev/null +++ b/lib/LoggingEvent.js @@ -0,0 +1,79 @@ +module.exports = (levels) => { + /** + * @name LoggingEvent + * @namespace Log4js + */ + class LoggingEvent { + /** + * Models a logging event. + * @constructor + * @param {String} categoryName name of category + * @param {Log4js.Level} level level of message + * @param {Array} data objects to log + * @author Seth Chisamore + */ + constructor(categoryName, level, data, context) { + this.startTime = new Date(); + this.categoryName = categoryName; + this.data = data; + this.level = level; + this.context = Object.assign({}, context); + this.pid = process.pid; + // if (cluster && cluster.isWorker) { + // this.cluster = { + // workerId: cluster.worker.id, + // worker: process.pid + // }; + // } + } + + serialise() { + // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. + // The following allows us to serialize errors correctly. 
+ // Validate that we really are in this case + try { + const logData = this.data.map((e) => { + if (e && e.stack && JSON.stringify(e) === '{}') { + e = { message: e.message, stack: e.stack }; + } + return e; + }); + this.data = logData; + return JSON.stringify(this); + } catch (e) { + return new LoggingEvent( + 'log4js', + levels.ERROR, + ['Unable to serialise log event due to :', e] + ).serialise(); + } + } + + static deserialise(serialised) { + let event; + try { + event = JSON.parse(serialised); + event.startTime = new Date(event.startTime); + event.level = levels.getLevel(event.level.levelStr); + event.data = event.data.map((e) => { + if (e && e.stack) { + const fakeError = new Error(e.message); + fakeError.stack = e.stack; + e = fakeError; + } + return e; + }); + } catch (e) { + event = new LoggingEvent( + 'log4js', + levels.ERROR, + ['Unable to parse log:', serialised, 'because: ', e] + ); + } + + return event; + } + } + + return LoggingEvent; +}; diff --git a/lib/clustering.js b/lib/clustering.js new file mode 100644 index 0000000..b046dbf --- /dev/null +++ b/lib/clustering.js @@ -0,0 +1,81 @@ +const debug = require('debug')('log4js:clustering'); + +let cluster; +try { + cluster = require('cluster'); // eslint-disable-line global-require +} catch (e) { + debug('Clustering support disabled because require(cluster) threw an error: ', e); +} + +module.exports = (config) => { + const disabled = config.disableClustering || !cluster; + const pm2 = config.pm2; + const pm2InstanceVar = config.pm2InstanceVar || 'NODE_APP_INSTANCE'; + const listeners = []; + + debug(`clustering disabled ? ${disabled}`); + debug(`cluster.isMaster ? ${cluster && cluster.isMaster}`); + debug(`pm2 enabled ? ${pm2}`); + debug(`pm2InstanceVar = ${pm2InstanceVar}`); + debug(`process.env[${pm2InstanceVar}] = ${process.env[pm2InstanceVar]}`); + + const isPM2Master = () => pm2 && process.env[pm2InstanceVar] === '0'; + const isMaster = () => disabled || cluster.isMaster || isPM2Master(); + const isWorker = () => !isMaster(); + + // in a multi-process node environment, worker loggers will use + // process.send + const receiver = (worker, message) => { + // prior to node v6, the worker parameter was not passed (args were message, handle) + debug('cluster message received from worker ', worker, ': ', message); + if (worker.topic && worker.data) { + message = worker; + worker = undefined; + } + if (message && message.topic && message.topic === 'log4js:message') { + debug('received message: ', message.data); + const logEvent = LoggingEvent.deserialise(message.data); + listeners.forEach(l => l(logEvent)); + } + }; + + // just in case configure is called after shutdown + pm2 && process.removeListener('message', receiver); + cluster.removeListener('message', receiver); + if (config.disableClustering) { + debug('Not listening for cluster messages, because clustering disabled.'); + } else if (isPM2Master()) { + // PM2 cluster support + // PM2 runs everything as workers - install pm2-intercom for this to work. 
+ // we only want one of the app instances to write logs + debug('listening for PM2 broadcast messages'); + process.on('message', receiver); + } else if (cluster.isMaster) { + debug('listening for cluster messages'); + cluster.on('message', receiver); + } else { + debug('not listening for messages, because we are not a master process'); + } + + + return { + onlyOnMaster: (fn) => { + if (isMaster()) { + fn(); + } + }, + onlyOnWorker: (fn) => { + if (isWorker()) { + fn(); + } + }, + isMaster: isMaster, + isWorker: isWorker, + send: (msg) => { + + }, + onMessage: (listener) => { + listeners.push(listener); + } + }; +}; diff --git a/lib/configuration.js b/lib/configuration.js index b471d9d..d59a2ce 100644 --- a/lib/configuration.js +++ b/lib/configuration.js @@ -4,15 +4,9 @@ const util = require('util'); const path = require('path'); const levels = require('./levels'); const layouts = require('./layouts'); +const clustering = require('./clustering'); const debug = require('debug')('log4js:configuration'); -let cluster; -try { - cluster = require('cluster'); // eslint-disable-line global-require -} catch (e) { - debug('Clustering support disabled because require(cluster) threw an error: ', e); -} - const validColours = [ 'white', 'grey', 'black', 'blue', 'cyan', 'green', @@ -80,18 +74,12 @@ class Configuration { debug(`DEPRECATION: Appender ${config.type} exports a shutdown function.`); } - if (this.disableClustering || cluster.isMaster || (this.pm2 && process.env[this.pm2InstanceVar] === '0')) { - debug(`cluster.isMaster ? ${cluster.isMaster}`); - debug(`pm2 enabled ? ${this.pm2}`); - debug(`pm2InstanceVar = ${this.pm2InstanceVar}`); - debug(`process.env[${this.pm2InstanceVar}] = ${process.env[this.pm2InstanceVar]}`); - return appenderModule.configure( - config, - layouts, - this.configuredAppenders.get.bind(this.configuredAppenders), - this.configuredLevels - ); - } + this.clustering.onlyOnMaster(() => appenderModule.configure( + config, + layouts, + this.configuredAppenders.get.bind(this.configuredAppenders), + this.configuredLevels + )); return () => {}; } @@ -203,12 +191,8 @@ class Configuration { this.throwExceptionIf(not(anObject(candidate.appenders)), 'must have a property "appenders" of type object.'); this.throwExceptionIf(not(anObject(candidate.categories)), 'must have a property "categories" of type object.'); - this.disableClustering = this.candidate.disableClustering || !cluster; - - this.pm2 = this.candidate.pm2; - this.pm2InstanceVar = this.candidate.pm2InstanceVar || 'NODE_APP_INSTANCE'; - this.levels = candidate.levels; + this.clustering = clustering(this.candidate); this.appenders = candidate.appenders; this.categories = candidate.categories; } diff --git a/lib/log4js.js b/lib/log4js.js index c8484b6..c4b79aa 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -29,12 +29,6 @@ const connectModule = require('./connect-logger'); const logger = require('./logger'); const layouts = require('./layouts'); -let cluster; -try { - cluster = require('cluster'); // eslint-disable-line global-require -} catch (e) { - debug('Clustering support disabled because require(cluster) threw an error: ', e); -} const defaultConfig = { appenders: { @@ -49,6 +43,7 @@ let Logger; let LoggingEvent; let config; let connectLogger; +let clustering; let enabled = false; function configForCategory(category) { @@ -101,14 +96,6 @@ function workerDispatch(logEvent) { process.send({ topic: 'log4js:message', data: logEvent.serialise() }); } -function isPM2Master() { - return config.pm2 && 
process.env[config.pm2InstanceVar] === '0'; -} - -function isMaster() { - return config.disableClustering || cluster.isMaster || isPM2Master(); -} - /** * Get a logger instance. * @static @@ -129,21 +116,6 @@ function loadConfigurationFile(filename) { return filename; } -// in a multi-process node environment, worker loggers will use -// process.send -const receiver = (worker, message) => { - // prior to node v6, the worker parameter was not passed (args were message, handle) - debug('cluster message received from worker ', worker, ': ', message); - if (worker.topic && worker.data) { - message = worker; - worker = undefined; - } - if (message && message.topic && message.topic === 'log4js:message') { - debug('received message: ', message.data); - sendLogEventToAppender(LoggingEvent.deserialise(message.data)); - } -}; - function configure(configurationFileOrObject) { let configObject = configurationFileOrObject; @@ -152,29 +124,14 @@ function configure(configurationFileOrObject) { } debug(`Configuration is ${configObject}`); config = new Configuration(configObject); + clustering = config.clustering; module.exports.levels = config.levels; const loggerModule = logger(config.levels, levelForCategory, setLevelForCategory); Logger = loggerModule.Logger; LoggingEvent = loggerModule.LoggingEvent; module.exports.connectLogger = connectModule(config.levels).connectLogger; - // just in case configure is called after shutdown - process.removeListener('message', receiver); - cluster.removeListener('message', receiver); - if (config.disableClustering) { - debug('Not listening for cluster messages, because clustering disabled.'); - } else if (isPM2Master()) { - // PM2 cluster support - // PM2 runs everything as workers - install pm2-intercom for this to work. - // we only want one of the app instances to write logs - debug('listening for PM2 broadcast messages'); - process.on('message', receiver); - } else if (cluster.isMaster) { - debug('listening for cluster messages'); - cluster.on('message', receiver); - } else { - debug('not listening for messages, because we are not a master process'); - } + clustering.onMessage(sendLogEventToAppender); enabled = true; } diff --git a/lib/logger.js b/lib/logger.js index 0da1182..9bdd010 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -3,90 +3,10 @@ 'use strict'; const debug = require('debug')('log4js:logger'); - -let cluster; -try { - cluster = require('cluster'); // eslint-disable-line global-require -} catch (e) { - debug('Clustering support disabled because require(cluster) threw an error: ', e); -} +const loggingEventModule = require('./LoggingEvent'); module.exports = function (levels, getLevelForCategory, setLevelForCategory) { - /** - * @name LoggingEvent - * @namespace Log4js - */ - class LoggingEvent { - /** - * Models a logging event. - * @constructor - * @param {String} categoryName name of category - * @param {Log4js.Level} level level of message - * @param {Array} data objects to log - * @author Seth Chisamore - */ - constructor(categoryName, level, data, context) { - this.startTime = new Date(); - this.categoryName = categoryName; - this.data = data; - this.level = level; - this.context = Object.assign({}, context); - this.pid = process.pid; - if (cluster && cluster.isWorker) { - this.cluster = { - workerId: cluster.worker.id, - worker: process.pid - }; - } - } - - serialise() { - // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. - // The following allows us to serialize errors correctly. 
- // Validate that we really are in this case - try { - const logData = this.data.map((e) => { - if (e && e.stack && JSON.stringify(e) === '{}') { - e = { message: e.message, stack: e.stack }; - } - return e; - }); - this.data = logData; - return JSON.stringify(this); - } catch (e) { - return new LoggingEvent( - 'log4js', - levels.ERROR, - ['Unable to serialise log event due to :', e] - ).serialise(); - } - } - - static deserialise(serialised) { - let event; - try { - event = JSON.parse(serialised); - event.startTime = new Date(event.startTime); - event.level = levels.getLevel(event.level.levelStr); - event.data = event.data.map((e) => { - if (e && e.stack) { - const fakeError = new Error(e.message); - fakeError.stack = e.stack; - e = fakeError; - } - return e; - }); - } catch (e) { - event = new LoggingEvent( - 'log4js', - levels.ERROR, - ['Unable to parse log:', serialised, 'because: ', e] - ); - } - - return event; - } - } + const LoggingEvent = loggingEventModule(levels); /** * Logger to log messages. diff --git a/lib/server.js b/lib/server.js new file mode 100644 index 0000000..32cd555 --- /dev/null +++ b/lib/server.js @@ -0,0 +1,24 @@ +const net = require('net'); + +module.exports = (config, clustering) => { + // dummy shutdown if we're not master + let shutdown = (cb) => { cb(); }; + + clustering.onlyOnMaster(() => { + const server = net.createServer((socket) => { + socket.setEncoding('utf8'); + socket.on('data', clustering.send); + socket.on('end', clustering.send); + }); + + server.listen(config.port || 5000, config.host || 'localhost', () => { + server.unref(); + }); + + shutdown = (cb) => { + server.close(cb); + }; + }); + + return shutdown; +}; diff --git a/test/tap/server-test.js b/test/tap/server-test.js index a713e47..77c2c9b 100644 --- a/test/tap/server-test.js +++ b/test/tap/server-test.js @@ -1,6 +1,44 @@ const test = require('tap').test; +const net = require('net'); +const log4js = require('../../lib/log4js'); +const vcr = require('../../lib/appenders/recording'); +const levels = require('../../lib/levels')(); +const LoggingEvent = (require('../../lib/logger')(levels)).LoggingEvent; + +log4js.configure({ + appenders: { + vcr: { type: 'recording' } + }, + categories: { + default: { appenders: ['vcr'], level: 'debug' } + }, + listen: { + port: 5678 + } +}); test('TCP Server', (batch) => { - batch.test('should listen for TCP messages and re-send via process.send'); + batch.test('should listen for TCP messages and re-send via process.send', (t) => { + const socket = net.connect(5678, () => { + socket.write( + (new LoggingEvent('test-category', levels.INFO, ['something'], {})).serialise(), + () => { + socket.end(); + log4js.shutdown(() => { + const logs = vcr.replay(); + t.equal(logs.length, 1); + t.match(logs[0], { + data: ['something'], + categoryName: 'test-category', + level: { levelStr: 'INFO' }, + context: {} + }); + t.end(); + }); + } + ); + }); + socket.unref(); + }); batch.end(); }); From 8084e80027dcfa290fc92dd4df85f935807106e4 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Wed, 17 Jan 2018 08:24:50 +1100 Subject: [PATCH 04/34] fix(tests): tests failing - config listeners not working in sandbox --- examples/layouts.js | 13 + lib/LoggingEvent.js | 145 +++--- lib/appenders/categoryFilter.js | 4 + lib/appenders/index.js | 97 ++++ lib/categories.js | 125 +++++ lib/clustering.js | 122 +++-- lib/configuration.js | 218 ++------ lib/connect-logger.js | 150 +++--- lib/levels.js | 174 ++++--- lib/log4js.js | 96 +--- lib/logger.js | 173 +++---- 
test/tap/configuration-validation-test.js | 605 +++++++++++----------- test/tap/connect-logger-test.js | 4 +- test/tap/connect-nolog-test.js | 4 +- test/tap/levels-test.js | 2 +- test/tap/logger-test.js | 31 +- test/tap/server-test.js | 4 +- 17 files changed, 1025 insertions(+), 942 deletions(-) create mode 100644 examples/layouts.js create mode 100644 lib/appenders/index.js create mode 100644 lib/categories.js diff --git a/examples/layouts.js b/examples/layouts.js new file mode 100644 index 0000000..0d47444 --- /dev/null +++ b/examples/layouts.js @@ -0,0 +1,13 @@ +const log4js = require('../lib/log4js'); + +log4js.configure({ + appenders: { + out: { type: 'stdout', layout: { type: 'messagePassThrough' } } + }, + categories: { + default: { appenders: ['out'], level: 'info' } + } +}); + +const logger = log4js.getLogger('thing'); +logger.info('This should not have a timestamp'); diff --git a/lib/LoggingEvent.js b/lib/LoggingEvent.js index fe8b1cb..c1389b2 100644 --- a/lib/LoggingEvent.js +++ b/lib/LoggingEvent.js @@ -1,81 +1,80 @@ const CircularJSON = require('circular-json'); +const levels = require('./levels'); -module.exports = (levels) => { +/** + * @name LoggingEvent + * @namespace Log4js + */ +class LoggingEvent { /** - * @name LoggingEvent - * @namespace Log4js + * Models a logging event. + * @constructor + * @param {String} categoryName name of category + * @param {Log4js.Level} level level of message + * @param {Array} data objects to log + * @author Seth Chisamore */ - class LoggingEvent { - /** - * Models a logging event. - * @constructor - * @param {String} categoryName name of category - * @param {Log4js.Level} level level of message - * @param {Array} data objects to log - * @author Seth Chisamore - */ - constructor(categoryName, level, data, context) { - this.startTime = new Date(); - this.categoryName = categoryName; - this.data = data; - this.level = level; - this.context = Object.assign({}, context); - this.pid = process.pid; - // if (cluster && cluster.isWorker) { - // this.cluster = { - // workerId: cluster.worker.id, - // worker: process.pid - // }; - // } - } + constructor(categoryName, level, data, context) { + this.startTime = new Date(); + this.categoryName = categoryName; + this.data = data; + this.level = level; + this.context = Object.assign({}, context); + this.pid = process.pid; + // if (cluster && cluster.isWorker) { + // this.cluster = { + // workerId: cluster.worker.id, + // worker: process.pid + // }; + // } + } - serialise() { - // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. - // The following allows us to serialize errors correctly. 
- // Validate that we really are in this case - try { - const logData = this.data.map((e) => { - if (e && e.stack && CircularJSON.stringify(e) === '{}') { - e = { message: e.message, stack: e.stack }; - } - return e; - }); - this.data = logData; - return CircularJSON.stringify(this); - } catch (e) { - return new LoggingEvent( - 'log4js', - levels.ERROR, - ['Unable to serialise log event due to :', e] - ).serialise(); - } - } - - static deserialise(serialised) { - let event; - try { - event = CircularJSON.parse(serialised); - event.startTime = new Date(event.startTime); - event.level = levels.getLevel(event.level.levelStr); - event.data = event.data.map((e) => { - if (e && e.stack) { - const fakeError = new Error(e.message); - fakeError.stack = e.stack; - e = fakeError; - } - return e; - }); - } catch (e) { - event = new LoggingEvent( - 'log4js', - levels.ERROR, - ['Unable to parse log:', serialised, 'because: ', e] - ); - } - - return event; + serialise() { + // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. + // The following allows us to serialize errors correctly. + // Validate that we really are in this case + try { + const logData = this.data.map((e) => { + if (e && e.stack && CircularJSON.stringify(e) === '{}') { + e = { message: e.message, stack: e.stack }; + } + return e; + }); + this.data = logData; + return CircularJSON.stringify(this); + } catch (e) { + return new LoggingEvent( + 'log4js', + levels.ERROR, + ['Unable to serialise log event due to :', e] + ).serialise(); } } - return LoggingEvent; -}; + static deserialise(serialised) { + let event; + try { + event = CircularJSON.parse(serialised); + event.startTime = new Date(event.startTime); + event.level = levels.getLevel(event.level.levelStr); + event.data = event.data.map((e) => { + if (e && e.stack) { + const fakeError = new Error(e.message); + fakeError.stack = e.stack; + e = fakeError; + } + return e; + }); + } catch (e) { + event = new LoggingEvent( + 'log4js', + levels.ERROR, + ['Unable to parse log:', serialised, 'because: ', e] + ); + } + + return event; + } +} + +module.exports = LoggingEvent; diff --git a/lib/appenders/categoryFilter.js b/lib/appenders/categoryFilter.js index 263970b..4ec8327 100644 --- a/lib/appenders/categoryFilter.js +++ b/lib/appenders/categoryFilter.js @@ -1,9 +1,13 @@ 'use strict'; +const debug = require('debug')('log4js:categoryFilter'); + function categoryFilter(excludes, appender) { if (typeof excludes === 'string') excludes = [excludes]; return (logEvent) => { + debug(`Checking ${logEvent.categoryName} against ${excludes}`); if (excludes.indexOf(logEvent.categoryName) === -1) { + debug('Not excluded, sending to appender'); appender(logEvent); } }; diff --git a/lib/appenders/index.js b/lib/appenders/index.js new file mode 100644 index 0000000..c5448e0 --- /dev/null +++ b/lib/appenders/index.js @@ -0,0 +1,97 @@ +const path = require('path'); +const debug = require('debug')('log4js:appenders'); +const configuration = require('../configuration'); +const clustering = require('../clustering'); +const levels = require('../levels'); +const layouts = require('../layouts'); + +const appenders = new Map(); + +const tryLoading = (modulePath, config) => { + debug('Loading module from ', modulePath); + try { + return require(modulePath); //eslint-disable-line + } catch (e) { + // if the module was found, and we still got an error, then raise it + configuration.throwExceptionIf( + config, + e.code !== 'MODULE_NOT_FOUND', + `appender "${modulePath}" could not be loaded 
(error was: ${e})` + ); + return undefined; + } +}; + +const loadAppenderModule = (type, config) => tryLoading(`./${type}`, config) || + tryLoading(type, config) || + tryLoading(path.join(path.dirname(require.main.filename), type), config) || + tryLoading(path.join(process.cwd(), type), config); + +const createAppender = (name, config) => { + const appenderConfig = config.appenders[name]; + const appenderModule = loadAppenderModule(appenderConfig.type, config); + configuration.throwExceptionIf( + config, + configuration.not(appenderModule), + `appender "${name}" is not valid (type "${appenderConfig.type}" could not be found)` + ); + if (appenderModule.appender) { + debug(`DEPRECATION: Appender ${appenderConfig.type} exports an appender function.`); + } + if (appenderModule.shutdown) { + debug(`DEPRECATION: Appender ${appenderConfig.type} exports a shutdown function.`); + } + + debug(`${name}: clustering.isMaster ? ${clustering.isMaster()}`); + debug(`${name}: appenderModule is ${require('util').inspect(appenderModule)}`); // eslint-disable-line + return clustering.onlyOnMaster(() => { + debug(`calling appenderModule.configure for ${name} / ${appenderConfig.type}`); + return appenderModule.configure( + appenderConfig, + layouts, + appender => appenders.get(appender), + levels + ); + }, () => {}); +}; + +const setup = (config) => { + appenders.clear(); + + Object.keys(config.appenders).forEach((name) => { + debug(`Creating appender ${name}`); + appenders.set(name, createAppender(name, config)); + }); +}; + +// setup({ +// appenders: { +// stdout: { type: 'stdout' } +// } +// }); + +configuration.addListener((config) => { + configuration.throwExceptionIf( + config, + configuration.not(configuration.anObject(config.appenders)), + 'must have a property "appenders" of type object.' + ); + const appenderNames = Object.keys(config.appenders); + configuration.throwExceptionIf( + config, + configuration.not(appenderNames.length), + 'must define at least one appender.' + ); + + appenderNames.forEach((name) => { + configuration.throwExceptionIf( + config, + configuration.not(config.appenders[name].type), + `appender "${name}" is not valid (must be an object with property "type")` + ); + }); +}); + +configuration.addListener(setup); + +module.exports = appenders; diff --git a/lib/categories.js b/lib/categories.js new file mode 100644 index 0000000..b107c66 --- /dev/null +++ b/lib/categories.js @@ -0,0 +1,125 @@ +const configuration = require('./configuration'); +const levels = require('./levels'); +const appenders = require('./appenders'); +const debug = require('debug')('log4js:categories'); + +const categories = new Map(); + +configuration.addListener((config) => { + configuration.throwExceptionIf( + config, + configuration.not(configuration.anObject(config.categories)), + 'must have a property "categories" of type object.' + ); + + const categoryNames = Object.keys(config.categories); + configuration.throwExceptionIf( + config, + configuration.not(categoryNames.length), + 'must define at least one category.' 
+ ); + + categoryNames.forEach((name) => { + const category = config.categories[name]; + configuration.throwExceptionIf( + config, + [ + configuration.not(category.appenders), + configuration.not(category.level) + ], + `category "${name}" is not valid (must be an object with properties "appenders" and "level")` + ); + + configuration.throwExceptionIf( + config, + configuration.not(Array.isArray(category.appenders)), + `category "${name}" is not valid (appenders must be an array of appender names)` + ); + + configuration.throwExceptionIf( + config, + configuration.not(category.appenders.length), + `category "${name}" is not valid (appenders must contain at least one appender name)` + ); + + category.appenders.forEach((appender) => { + configuration.throwExceptionIf( + config, + configuration.not(appenders.get(appender)), + `category "${name}" is not valid (appender "${appender}" is not defined)` + ); + }); + + configuration.throwExceptionIf( + config, + configuration.not(levels.getLevel(category.level)), + `category "${name}" is not valid (level "${category.level}" not recognised;` + + ` valid levels are ${levels.levels.join(', ')})` + ); + }); + + configuration.throwExceptionIf( + config, + configuration.not(config.categories.default), + 'must define a "default" category.' + ); +}); + +const setup = (config) => { + categories.clear(); + + const categoryNames = Object.keys(config.categories); + categoryNames.forEach((name) => { + const category = config.categories[name]; + const categoryAppenders = []; + category.appenders.forEach((appender) => { + categoryAppenders.push(appenders.get(appender)); + debug(`Creating category ${name}`); + categories.set( + name, + { appenders: categoryAppenders, level: levels.getLevel(category.level) } + ); + }); + }); +}; + +// setup({ +// categories: { default: { appenders: ['stdout'], level: 'OFF' } } +// }); +configuration.addListener(setup); + +const configForCategory = (category) => { + debug(`configForCategory: searching for config for ${category}`); + if (categories.has(category)) { + debug(`configForCategory: ${category} exists in config, returning it`); + return categories.get(category); + } + if (category.indexOf('.') > 0) { + debug(`configForCategory: ${category} has hierarchy, searching for parents`); + return configForCategory(category.substring(0, category.lastIndexOf('.'))); + } + debug('configForCategory: returning config for default category'); + return configForCategory('default'); +}; + +const appendersForCategory = category => configForCategory(category).appenders; +const getLevelForCategory = category => configForCategory(category).level; + +const setLevelForCategory = (category, level) => { + let categoryConfig = categories.get(category); + debug(`setLevelForCategory: found ${categoryConfig} for ${category}`); + if (!categoryConfig) { + const sourceCategoryConfig = configForCategory(category); + debug('setLevelForCategory: no config found for category, ' + + `found ${sourceCategoryConfig} for parents of ${category}`); + categoryConfig = { appenders: sourceCategoryConfig.appenders }; + } + categoryConfig.level = level; + categories.set(category, categoryConfig); +}; + +module.exports = { + appendersForCategory, + getLevelForCategory, + setLevelForCategory +}; diff --git a/lib/clustering.js b/lib/clustering.js index b046dbf..814e320 100644 --- a/lib/clustering.js +++ b/lib/clustering.js @@ -1,47 +1,60 @@ const debug = require('debug')('log4js:clustering'); +const LoggingEvent = require('./LoggingEvent'); +const configuration = 
require('./configuration'); +const cluster = require('cluster'); -let cluster; -try { - cluster = require('cluster'); // eslint-disable-line global-require -} catch (e) { - debug('Clustering support disabled because require(cluster) threw an error: ', e); -} +const listeners = []; -module.exports = (config) => { - const disabled = config.disableClustering || !cluster; - const pm2 = config.pm2; - const pm2InstanceVar = config.pm2InstanceVar || 'NODE_APP_INSTANCE'; - const listeners = []; +let disabled = false; +let pm2 = false; +let pm2InstanceVar = 'NODE_APP_INSTANCE'; + +const isPM2Master = () => pm2 && process.env[pm2InstanceVar] === '0'; +const isMaster = () => disabled || cluster.isMaster || isPM2Master(); +const isWorker = () => !isMaster(); + +const sendToListeners = (logEvent) => { + listeners.forEach(l => l(logEvent)); +}; + +// in a multi-process node environment, worker loggers will use +// process.send +const receiver = (worker, message) => { + // prior to node v6, the worker parameter was not passed (args were message, handle) + debug('cluster message received from worker ', worker, ': ', message); + if (worker.topic && worker.data) { + message = worker; + worker = undefined; + } + if (message && message.topic && message.topic === 'log4js:message') { + debug('received message: ', message.data); + const logEvent = LoggingEvent.deserialise(message.data); + sendToListeners(logEvent); + } +}; + +configuration.addListener((config) => { + // clear out the listeners, because configure has been called. + listeners.length = 0; + + disabled = config.disableClustering; + pm2 = config.pm2; + pm2InstanceVar = config.pm2InstanceVar || 'NODE_APP_INSTANCE'; debug(`clustering disabled ? ${disabled}`); - debug(`cluster.isMaster ? ${cluster && cluster.isMaster}`); + debug(`cluster.isMaster ? ${cluster.isMaster}`); debug(`pm2 enabled ? ${pm2}`); debug(`pm2InstanceVar = ${pm2InstanceVar}`); debug(`process.env[${pm2InstanceVar}] = ${process.env[pm2InstanceVar]}`); - const isPM2Master = () => pm2 && process.env[pm2InstanceVar] === '0'; - const isMaster = () => disabled || cluster.isMaster || isPM2Master(); - const isWorker = () => !isMaster(); - - // in a multi-process node environment, worker loggers will use - // process.send - const receiver = (worker, message) => { - // prior to node v6, the worker parameter was not passed (args were message, handle) - debug('cluster message received from worker ', worker, ': ', message); - if (worker.topic && worker.data) { - message = worker; - worker = undefined; - } - if (message && message.topic && message.topic === 'log4js:message') { - debug('received message: ', message.data); - const logEvent = LoggingEvent.deserialise(message.data); - listeners.forEach(l => l(logEvent)); - } - }; - // just in case configure is called after shutdown - pm2 && process.removeListener('message', receiver); - cluster.removeListener('message', receiver); + if (pm2) { + process.removeListener('message', receiver); + } + if (cluster.removeListener) { + cluster.removeListener('message', receiver); + } + if (config.disableClustering) { debug('Not listening for cluster messages, because clustering disabled.'); } else if (isPM2Master()) { @@ -56,26 +69,29 @@ module.exports = (config) => { } else { debug('not listening for messages, because we are not a master process'); } +}); - - return { - onlyOnMaster: (fn) => { - if (isMaster()) { - fn(); +module.exports = { + onlyOnMaster: (fn, notMaster) => (isMaster() ? fn() : notMaster), + onlyOnWorker: (fn, notWorker) => (isWorker() ? 
fn() : notWorker), + isMaster: isMaster, + isWorker: isWorker, + send: (msg) => { + if (isWorker()) { + if (pm2) { + process.send({ type: 'log4js:message', data: msg.serialise() }); + } else { + msg.cluster = { + workerId: cluster.worker.id, + worker: process.pid + }; + cluster.send({ type: 'log4js:message', data: msg.serialise() }); } - }, - onlyOnWorker: (fn) => { - if (isWorker()) { - fn(); - } - }, - isMaster: isMaster, - isWorker: isWorker, - send: (msg) => { - - }, - onMessage: (listener) => { - listeners.push(listener); + } else { + sendToListeners(msg); } - }; + }, + onMessage: (listener) => { + listeners.push(listener); + } }; diff --git a/lib/configuration.js b/lib/configuration.js index d59a2ce..b798cf3 100644 --- a/lib/configuration.js +++ b/lib/configuration.js @@ -1,201 +1,49 @@ 'use strict'; const util = require('util'); -const path = require('path'); -const levels = require('./levels'); -const layouts = require('./layouts'); -const clustering = require('./clustering'); const debug = require('debug')('log4js:configuration'); -const validColours = [ - 'white', 'grey', 'black', - 'blue', 'cyan', 'green', - 'magenta', 'red', 'yellow' -]; +const listeners = []; -function not(thing) { - return !thing; -} +const not = thing => !thing; -function anObject(thing) { - return thing && typeof thing === 'object' && !Array.isArray(thing); -} +const anObject = thing => thing && typeof thing === 'object' && !Array.isArray(thing); -function validIdentifier(thing) { - return /^[A-Za-z][A-Za-z0-9_]*$/g.test(thing); -} +const validIdentifier = thing => /^[A-Za-z][A-Za-z0-9_]*$/g.test(thing); -function anInteger(thing) { - return thing && typeof thing === 'number' && Number.isInteger(thing); -} +const anInteger = thing => thing && typeof thing === 'number' && Number.isInteger(thing); -class Configuration { - throwExceptionIf(checks, message) { - const tests = Array.isArray(checks) ? checks : [checks]; - tests.forEach((test) => { - if (test) { - throw new Error(`Problem with log4js configuration: (${util.inspect(this.candidate, { depth: 5 })})` + - ` - ${message}`); - } - }); +const addListener = (fn) => { + if (fn) { + listeners.push(fn); } +}; - tryLoading(modulePath) { - debug('Loading module from ', modulePath); - try { - return require(modulePath); //eslint-disable-line - } catch (e) { - // if the module was found, and we still got an error, then raise it - this.throwExceptionIf( - e.code !== 'MODULE_NOT_FOUND', - `appender "${path}" could not be loaded (error was: ${e})` - ); - return undefined; +const throwExceptionIf = (config, checks, message) => { + const tests = Array.isArray(checks) ? 
checks : [checks]; + tests.forEach((test) => { + if (test) { + throw new Error(`Problem with log4js configuration: (${util.inspect(config, { depth: 5 })})` + + ` - ${message}`); } - } + }); +}; - loadAppenderModule(type) { - return this.tryLoading(`./appenders/${type}`) || - this.tryLoading(type) || - this.tryLoading(path.join(path.dirname(require.main.filename), type)) || - this.tryLoading(path.join(process.cwd(), type)); - } +const configure = (candidate) => { + debug('New configuration to be validated: ', candidate); + throwExceptionIf(candidate, not(anObject(candidate)), 'must be an object.'); - createAppender(name, config) { - const appenderModule = this.loadAppenderModule(config.type); - this.throwExceptionIf( - not(appenderModule), - `appender "${name}" is not valid (type "${config.type}" could not be found)` - ); - if (appenderModule.appender) { - debug(`DEPRECATION: Appender ${config.type} exports an appender function.`); - } - if (appenderModule.shutdown) { - debug(`DEPRECATION: Appender ${config.type} exports a shutdown function.`); - } + debug('Calling configuration listeners'); + listeners.forEach(listener => listener(candidate)); + debug('Configuration finished.'); +}; - this.clustering.onlyOnMaster(() => appenderModule.configure( - config, - layouts, - this.configuredAppenders.get.bind(this.configuredAppenders), - this.configuredLevels - )); - return () => {}; - } - - get appenders() { - return this.configuredAppenders; - } - - set appenders(appenderConfig) { - const appenderNames = Object.keys(appenderConfig); - this.throwExceptionIf(not(appenderNames.length), 'must define at least one appender.'); - - this.configuredAppenders = new Map(); - appenderNames.forEach((name) => { - this.throwExceptionIf( - not(appenderConfig[name].type), - `appender "${name}" is not valid (must be an object with property "type")` - ); - - debug(`Creating appender ${name}`); - this.configuredAppenders.set(name, this.createAppender(name, appenderConfig[name])); - }); - } - - get categories() { - return this.configuredCategories; - } - - set categories(categoryConfig) { - const categoryNames = Object.keys(categoryConfig); - this.throwExceptionIf(not(categoryNames.length), 'must define at least one category.'); - - this.configuredCategories = new Map(); - categoryNames.forEach((name) => { - const category = categoryConfig[name]; - this.throwExceptionIf( - [ - not(category.appenders), - not(category.level) - ], - `category "${name}" is not valid (must be an object with properties "appenders" and "level")` - ); - - this.throwExceptionIf( - not(Array.isArray(category.appenders)), - `category "${name}" is not valid (appenders must be an array of appender names)` - ); - - this.throwExceptionIf( - not(category.appenders.length), - `category "${name}" is not valid (appenders must contain at least one appender name)` - ); - - const appenders = []; - category.appenders.forEach((appender) => { - this.throwExceptionIf( - not(this.configuredAppenders.get(appender)), - `category "${name}" is not valid (appender "${appender}" is not defined)` - ); - appenders.push(this.appenders.get(appender)); - }); - - this.throwExceptionIf( - not(this.configuredLevels.getLevel(category.level)), - `category "${name}" is not valid (level "${category.level}" not recognised;` + - ` valid levels are ${this.configuredLevels.levels.join(', ')})` - ); - - debug(`Creating category ${name}`); - this.configuredCategories.set( - name, - { appenders: appenders, level: this.configuredLevels.getLevel(category.level) } - ); - }); - - 
this.throwExceptionIf(not(categoryConfig.default), 'must define a "default" category.'); - } - - get levels() { - return this.configuredLevels; - } - - set levels(levelConfig) { - // levels are optional - if (levelConfig) { - this.throwExceptionIf(not(anObject(levelConfig)), 'levels must be an object'); - const newLevels = Object.keys(levelConfig); - newLevels.forEach((l) => { - this.throwExceptionIf( - not(validIdentifier(l)), - `level name "${l}" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)` - ); - this.throwExceptionIf(not(anObject(levelConfig[l])), `level "${l}" must be an object`); - this.throwExceptionIf(not(levelConfig[l].value), `level "${l}" must have a 'value' property`); - this.throwExceptionIf(not(anInteger(levelConfig[l].value)), `level "${l}".value must have an integer value`); - this.throwExceptionIf(not(levelConfig[l].colour), `level "${l}" must have a 'colour' property`); - this.throwExceptionIf( - not(validColours.indexOf(levelConfig[l].colour) > -1), - `level "${l}".colour must be one of ${validColours.join(', ')}` - ); - }); - } - this.configuredLevels = levels(levelConfig); - } - - constructor(candidate) { - this.candidate = candidate; - - this.throwExceptionIf(not(anObject(candidate)), 'must be an object.'); - this.throwExceptionIf(not(anObject(candidate.appenders)), 'must have a property "appenders" of type object.'); - this.throwExceptionIf(not(anObject(candidate.categories)), 'must have a property "categories" of type object.'); - - this.levels = candidate.levels; - this.clustering = clustering(this.candidate); - this.appenders = candidate.appenders; - this.categories = candidate.categories; - } -} - -module.exports = Configuration; +module.exports = { + configure, + addListener, + throwExceptionIf, + anObject, + anInteger, + validIdentifier, + not +}; diff --git a/lib/connect-logger.js b/lib/connect-logger.js index 9306ac1..3a314de 100755 --- a/lib/connect-logger.js +++ b/lib/connect-logger.js @@ -2,6 +2,8 @@ 'use strict'; +const levels = require('./levels'); + const DEFAULT_FORMAT = ':remote-addr - -' + ' ":method :url HTTP/:http-version"' + ' :status :content-length ":referrer"' + @@ -163,8 +165,7 @@ function createNoLogCondition(nolog) { return regexp; } -module.exports = function (levels) { - /** +/** * Log requests with the given `options` or a `format` string. * * Options: @@ -192,80 +193,77 @@ module.exports = function (levels) { * @param options * @api public */ - function getLogger(logger4js, options) { - /* eslint no-underscore-dangle:0 */ - if (typeof options === 'object') { - options = options || {}; - } else if (options) { - options = { format: options }; - } else { - options = {}; - } - - const thisLogger = logger4js; - let level = levels.getLevel(options.level, levels.INFO); - const fmt = options.format || DEFAULT_FORMAT; - const nolog = options.nolog ? createNoLogCondition(options.nolog) : null; - - return (req, res, next) => { - // mount safety - if (req._logging) return next(); - - // nologs - if (nolog && nolog.test(req.originalUrl)) return next(); - - if (thisLogger.isLevelEnabled(level) || options.level === 'auto') { - const start = new Date(); - const writeHead = res.writeHead; - - // flag as logging - req._logging = true; - - // proxy for statusCode. 
- res.writeHead = (code, headers) => { - res.writeHead = writeHead; - res.writeHead(code, headers); - - res.__statusCode = code; - res.__headers = headers || {}; - - // status code response level handling - if (options.level === 'auto') { - level = levels.INFO; - if (code >= 300) level = levels.WARN; - if (code >= 400) level = levels.ERROR; - } else { - level = levels.getLevel(options.level, levels.INFO); - } - }; - - // hook on end request to emit the log entry of the HTTP request. - res.on('finish', () => { - res.responseTime = new Date() - start; - // status code response level handling - if (res.statusCode && options.level === 'auto') { - level = levels.INFO; - if (res.statusCode >= 300) level = levels.WARN; - if (res.statusCode >= 400) level = levels.ERROR; - } - - if (thisLogger.isLevelEnabled(level)) { - const combinedTokens = assembleTokens(req, res, options.tokens || []); - - if (typeof fmt === 'function') { - const line = fmt(req, res, str => format(str, combinedTokens)); - if (line) thisLogger.log(level, line); - } else { - thisLogger.log(level, format(fmt, combinedTokens)); - } - } - }); - } - - // ensure next gets always called - return next(); - }; +module.exports = function getLogger(logger4js, options) { + /* eslint no-underscore-dangle:0 */ + if (typeof options === 'object') { + options = options || {}; + } else if (options) { + options = { format: options }; + } else { + options = {}; } - return { connectLogger: getLogger }; + const thisLogger = logger4js; + let level = levels.getLevel(options.level, levels.INFO); + const fmt = options.format || DEFAULT_FORMAT; + const nolog = options.nolog ? createNoLogCondition(options.nolog) : null; + + return (req, res, next) => { + // mount safety + if (req._logging) return next(); + + // nologs + if (nolog && nolog.test(req.originalUrl)) return next(); + + if (thisLogger.isLevelEnabled(level) || options.level === 'auto') { + const start = new Date(); + const writeHead = res.writeHead; + + // flag as logging + req._logging = true; + + // proxy for statusCode. + res.writeHead = (code, headers) => { + res.writeHead = writeHead; + res.writeHead(code, headers); + + res.__statusCode = code; + res.__headers = headers || {}; + + // status code response level handling + if (options.level === 'auto') { + level = levels.INFO; + if (code >= 300) level = levels.WARN; + if (code >= 400) level = levels.ERROR; + } else { + level = levels.getLevel(options.level, levels.INFO); + } + }; + + // hook on end request to emit the log entry of the HTTP request. 
+ res.on('finish', () => { + res.responseTime = new Date() - start; + // status code response level handling + if (res.statusCode && options.level === 'auto') { + level = levels.INFO; + if (res.statusCode >= 300) level = levels.WARN; + if (res.statusCode >= 400) level = levels.ERROR; + } + + if (thisLogger.isLevelEnabled(level)) { + const combinedTokens = assembleTokens(req, res, options.tokens || []); + + if (typeof fmt === 'function') { + const line = fmt(req, res, str => format(str, combinedTokens)); + if (line) thisLogger.log(level, line); + } else { + thisLogger.log(level, format(fmt, combinedTokens)); + } + } + }); + } + + // ensure next gets always called + return next(); + }; }; diff --git a/lib/levels.js b/lib/levels.js index 0ca70c8..99ac0ea 100644 --- a/lib/levels.js +++ b/lib/levels.js @@ -1,60 +1,22 @@ 'use strict'; -module.exports = function (customLevels) { - /** - * @name Level - * @namespace Log4js - */ - class Level { - constructor(level, levelStr, colour) { - this.level = level; - this.levelStr = levelStr; - this.colour = colour; - } +const configuration = require('./configuration'); - toString() { - return this.levelStr; - } +const validColours = [ + 'white', 'grey', 'black', + 'blue', 'cyan', 'green', + 'magenta', 'red', 'yellow' +]; - isLessThanOrEqualTo(otherLevel) { - if (typeof otherLevel === 'string') { - otherLevel = getLevel(otherLevel); - } - return this.level <= otherLevel.level; - } - - isGreaterThanOrEqualTo(otherLevel) { - if (typeof otherLevel === 'string') { - otherLevel = getLevel(otherLevel); - } - return this.level >= otherLevel.level; - } - - isEqualTo(otherLevel) { - if (typeof otherLevel === 'string') { - otherLevel = getLevel(otherLevel); - } - return this.level === otherLevel.level; - } +class Level { + constructor(level, levelStr, colour) { + this.level = level; + this.levelStr = levelStr; + this.colour = colour; } - const defaultLevels = { - ALL: new Level(Number.MIN_VALUE, 'ALL', 'grey'), - TRACE: new Level(5000, 'TRACE', 'blue'), - DEBUG: new Level(10000, 'DEBUG', 'cyan'), - INFO: new Level(20000, 'INFO', 'green'), - WARN: new Level(30000, 'WARN', 'yellow'), - ERROR: new Level(40000, 'ERROR', 'red'), - FATAL: new Level(50000, 'FATAL', 'magenta'), - MARK: new Level(9007199254740992, 'MARK', 'grey'), // 2^53 - OFF: new Level(Number.MAX_VALUE, 'OFF', 'grey') - }; - - if (customLevels) { - const levels = Object.keys(customLevels); - levels.forEach((l) => { - defaultLevels[l.toUpperCase()] = new Level(customLevels[l].value, l.toUpperCase(), customLevels[l].colour); - }); + toString() { + return this.levelStr; } /** @@ -63,7 +25,7 @@ module.exports = function (customLevels) { * @param {Level} [defaultLevel] -- default Level, if no String representation * @return {Level} */ - function getLevel(sArg, defaultLevel) { + static getLevel(sArg, defaultLevel) { if (!sArg) { return defaultLevel; } @@ -73,15 +35,109 @@ module.exports = function (customLevels) { } if (typeof sArg === 'string') { - return defaultLevels[sArg.toUpperCase()] || defaultLevel; + return Level[sArg.toUpperCase()] || defaultLevel; } - return getLevel(sArg.toString()); + return Level.getLevel(sArg.toString()); } - const orderedLevels = Object.keys(defaultLevels).sort((a, b) => b.level - a.level); - defaultLevels.getLevel = getLevel; - defaultLevels.levels = orderedLevels; + static addLevels(customLevels) { + if (customLevels) { + const levels = Object.keys(customLevels); + levels.forEach((l) => { + Level[l.toUpperCase()] = new Level( + customLevels[l].value, + l.toUpperCase(), + 
customLevels[l].colour + ); + Level.levels.push(Level[l.toUpperCase()]); + }); + Level.levels.sort((a, b) => a.level - b.level); + } + } - return defaultLevels; -}; + + isLessThanOrEqualTo(otherLevel) { + if (typeof otherLevel === 'string') { + otherLevel = Level.getLevel(otherLevel); + } + return this.level <= otherLevel.level; + } + + isGreaterThanOrEqualTo(otherLevel) { + if (typeof otherLevel === 'string') { + otherLevel = Level.getLevel(otherLevel); + } + return this.level >= otherLevel.level; + } + + isEqualTo(otherLevel) { + if (typeof otherLevel === 'string') { + otherLevel = Level.getLevel(otherLevel); + } + return this.level === otherLevel.level; + } +} + +Level.levels = []; +Level.addLevels({ + ALL: { value: Number.MIN_VALUE, colour: 'grey' }, + TRACE: { value: 5000, colour: 'blue' }, + DEBUG: { value: 10000, colour: 'cyan' }, + INFO: { value: 20000, colour: 'green' }, + WARN: { value: 30000, colour: 'yellow' }, + ERROR: { value: 40000, colour: 'red' }, + FATAL: { value: 50000, colour: 'magenta' }, + MARK: { value: 9007199254740992, colour: 'grey' }, // 2^53 + OFF: { value: Number.MAX_VALUE, colour: 'grey' } +}); + +configuration.addListener((config) => { + const levelConfig = config.levels; + if (levelConfig) { + configuration.throwExceptionIf( + config, + configuration.not(configuration.anObject(levelConfig)), + 'levels must be an object' + ); + const newLevels = Object.keys(levelConfig); + newLevels.forEach((l) => { + configuration.throwExceptionIf( + config, + configuration.not(configuration.validIdentifier(l)), + `level name "${l}" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)` + ); + configuration.throwExceptionIf( + config, + configuration.not(configuration.anObject(levelConfig[l])), + `level "${l}" must be an object` + ); + configuration.throwExceptionIf( + config, + configuration.not(levelConfig[l].value), + `level "${l}" must have a 'value' property` + ); + configuration.throwExceptionIf( + config, + configuration.not(configuration.anInteger(levelConfig[l].value)), + `level "${l}".value must have an integer value` + ); + configuration.throwExceptionIf( + config, + configuration.not(levelConfig[l].colour), + `level "${l}" must have a 'colour' property` + ); + configuration.throwExceptionIf( + config, + configuration.not(validColours.indexOf(levelConfig[l].colour) > -1), + `level "${l}".colour must be one of ${validColours.join(', ')}` + ); + }); + } +}); + +configuration.addListener((config) => { + Level.addLevels(config.levels); +}); + +module.exports = Level; diff --git a/lib/log4js.js b/lib/log4js.js index c1005aa..719e77a 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -15,88 +15,33 @@ * * NOTE: the authors below are the original browser-based log4js authors * don't try to contact them about bugs in this version :) - * @version 1.0 * @author Stephan Strittmatter - http://jroller.com/page/stritti * @author Seth Chisamore - http://www.chisamore.com * @since 2005-05-20 - * @static * Website: http://log4js.berlios.de */ const debug = require('debug')('log4js:main'); const fs = require('fs'); -const CircularJSON = require('circular-json'); -const Configuration = require('./configuration'); -const connectModule = require('./connect-logger'); -const logger = require('./logger'); +const configuration = require('./configuration'); const layouts = require('./layouts'); +const levels = require('./levels'); +const appenders = require('./appenders'); +const categories = require('./categories'); +const Logger = require('./logger'); +const 
clustering = require('./clustering'); +const connectLogger = require('./connect-logger'); - -const defaultConfig = { - appenders: { - stdout: { type: 'stdout' } - }, - categories: { - default: { appenders: ['stdout'], level: 'OFF' } - } -}; - -let Logger; -let LoggingEvent; -let config; -let connectLogger; -let clustering; let enabled = false; -function configForCategory(category) { - debug(`configForCategory: searching for config for ${category}`); - if (config.categories.has(category)) { - debug(`configForCategory: ${category} exists in config, returning it`); - return config.categories.get(category); - } - if (category.indexOf('.') > 0) { - debug(`configForCategory: ${category} has hierarchy, searching for parents`); - return configForCategory(category.substring(0, category.lastIndexOf('.'))); - } - debug('configForCategory: returning config for default category'); - return configForCategory('default'); -} - -function appendersForCategory(category) { - return configForCategory(category).appenders; -} - -function levelForCategory(category) { - return configForCategory(category).level; -} - -function setLevelForCategory(category, level) { - let categoryConfig = config.categories.get(category); - debug(`setLevelForCategory: found ${categoryConfig} for ${category}`); - if (!categoryConfig) { - const sourceCategoryConfig = configForCategory(category); - debug('setLevelForCategory: no config found for category, ' + - `found ${sourceCategoryConfig} for parents of ${category}`); - categoryConfig = { appenders: sourceCategoryConfig.appenders }; - } - categoryConfig.level = level; - config.categories.set(category, categoryConfig); -} - - function sendLogEventToAppender(logEvent) { if (!enabled) return; debug('Received log event ', logEvent); - const appenders = appendersForCategory(logEvent.categoryName); - appenders.forEach((appender) => { + const categoryAppenders = categories.appendersForCategory(logEvent.categoryName); + categoryAppenders.forEach((appender) => { appender(logEvent); }); } -function workerDispatch(logEvent) { - debug(`sending message to master from worker ${process.pid}`); - process.send({ topic: 'log4js:message', data: logEvent.serialise() }); -} - /** * Get a logger instance. * @static @@ -104,9 +49,7 @@ function workerDispatch(logEvent) { * @return {Logger} instance of logger for the category */ function getLogger(category) { - const cat = category || 'default'; - debug(`creating logger as ${isMaster() ? 'master' : 'worker'}`); - return new Logger((isMaster() ? sendLogEventToAppender : workerDispatch), cat); + return new Logger(category || 'default'); } function loadConfigurationFile(filename) { @@ -124,13 +67,7 @@ function configure(configurationFileOrObject) { configObject = loadConfigurationFile(configurationFileOrObject); } debug(`Configuration is ${configObject}`); - config = new Configuration(configObject); - clustering = config.clustering; - module.exports.levels = config.levels; - const loggerModule = logger(config.levels, levelForCategory, setLevelForCategory); - Logger = loggerModule.Logger; - LoggingEvent = loggerModule.LoggingEvent; - module.exports.connectLogger = connectModule(config.levels).connectLogger; + configuration.configure(configObject); clustering.onMessage(sendLogEventToAppender); @@ -152,8 +89,8 @@ function shutdown(cb) { enabled = false; // Call each of the shutdown functions in parallel - const appenders = Array.from(config.appenders.values()); - const shutdownFunctions = appenders.reduceRight((accum, next) => (next.shutdown ? 
accum + 1 : accum), 0); + const appendersToCheck = Array.from(appenders.values()); + const shutdownFunctions = appendersToCheck.reduceRight((accum, next) => (next.shutdown ? accum + 1 : accum), 0); let completed = 0; let error; @@ -173,7 +110,7 @@ function shutdown(cb) { return cb(); } - appenders.filter(a => a.shutdown).forEach(a => a.shutdown(complete)); + appendersToCheck.filter(a => a.shutdown).forEach(a => a.shutdown(complete)); return null; } @@ -190,9 +127,12 @@ const log4js = { configure, shutdown, connectLogger, + levels, addLayout: layouts.addLayout }; module.exports = log4js; // set ourselves up -configure(process.env.LOG4JS_CONFIG || defaultConfig); +if (process.env.LOG4JS_CONFIG) { + configure(process.env.LOG4JS_CONFIG); +} diff --git a/lib/logger.js b/lib/logger.js index 9bdd010..4519e83 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -3,103 +3,100 @@ 'use strict'; const debug = require('debug')('log4js:logger'); -const loggingEventModule = require('./LoggingEvent'); +const LoggingEvent = require('./LoggingEvent'); +const levels = require('./levels'); +const clustering = require('./clustering'); +const categories = require('./categories'); +const configuration = require('./configuration'); -module.exports = function (levels, getLevelForCategory, setLevelForCategory) { - const LoggingEvent = loggingEventModule(levels); - - /** - * Logger to log messages. - * use {@see log4js#getLogger(String)} to get an instance. - * - * @name Logger - * @namespace Log4js - * @param name name of category to log to - * @param level - the loglevel for the category - * @param dispatch - the function which will receive the logevents - * - * @author Stephan Strittmatter - */ - class Logger { - constructor(dispatch, name) { - if (typeof dispatch !== 'function') { - throw new Error('No dispatch function provided.'); - } - if (!name) { - throw new Error('No category provided.'); - } - this.category = name; - this.dispatch = dispatch; - this.context = {}; - debug(`Logger created (${this.category}, ${this.level}, ${this.dispatch})`); +/** + * Logger to log messages. + * use {@see log4js#getLogger(String)} to get an instance. 
+ * + * @name Logger + * @namespace Log4js + * @param name name of category to log to + * @param level - the loglevel for the category + * @param dispatch - the function which will receive the logevents + * + * @author Stephan Strittmatter + */ +class Logger { + constructor(name) { + if (!name) { + throw new Error('No category provided.'); } + this.category = name; + this.context = {}; + debug(`Logger created (${this.category}, ${this.level})`); + } - get level() { - return levels.getLevel(getLevelForCategory(this.category), levels.TRACE); - } + get level() { + return levels.getLevel(categories.getLevelForCategory(this.category), levels.TRACE); + } - set level(level) { - setLevelForCategory(this.category, levels.getLevel(level, this.level)); - } + set level(level) { + categories.setLevelForCategory(this.category, levels.getLevel(level, this.level)); + } - log() { - /* eslint prefer-rest-params:0 */ - // todo: once node v4 support dropped, use rest parameter instead - const args = Array.from(arguments); - const logLevel = levels.getLevel(args[0], levels.INFO); - if (this.isLevelEnabled(logLevel)) { - this._log(logLevel, args.slice(1)); - } - } - - isLevelEnabled(otherLevel) { - return this.level.isLessThanOrEqualTo(otherLevel); - } - - _log(level, data) { - debug(`sending log data (${level}) to appenders`); - const loggingEvent = new LoggingEvent(this.category, level, data, this.context); - this.dispatch(loggingEvent); - } - - addContext(key, value) { - this.context[key] = value; - } - - removeContext(key) { - delete this.context[key]; - } - - clearContext() { - this.context = {}; + log() { + /* eslint prefer-rest-params:0 */ + // todo: once node v4 support dropped, use rest parameter instead + const args = Array.from(arguments); + const logLevel = levels.getLevel(args[0], levels.INFO); + if (this.isLevelEnabled(logLevel)) { + this._log(logLevel, args.slice(1)); } } - function addLevelMethods(target) { - const level = levels.getLevel(target); - - const levelStrLower = level.toString().toLowerCase(); - const levelMethod = levelStrLower.replace(/_([a-z])/g, g => g[1].toUpperCase()); - const isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1); - - Logger.prototype[`is${isLevelMethod}Enabled`] = function () { - return this.isLevelEnabled(level); - }; - - Logger.prototype[levelMethod] = function () { - /* eslint prefer-rest-params:0 */ - // todo: once node v4 support dropped, use rest parameter instead - const args = Array.from(arguments); - if (this.isLevelEnabled(level)) { - this._log(level, args); - } - }; + isLevelEnabled(otherLevel) { + return this.level.isLessThanOrEqualTo(otherLevel); } - levels.levels.forEach(addLevelMethods); + _log(level, data) { + debug(`sending log data (${level}) to appenders`); + const loggingEvent = new LoggingEvent(this.category, level, data, this.context); + clustering.send(loggingEvent); + } - return { - LoggingEvent: LoggingEvent, - Logger: Logger + addContext(key, value) { + this.context[key] = value; + } + + removeContext(key) { + delete this.context[key]; + } + + clearContext() { + this.context = {}; + } +} + +function addLevelMethods(target) { + const level = levels.getLevel(target); + + const levelStrLower = level.toString().toLowerCase(); + const levelMethod = levelStrLower.replace(/_([a-z])/g, g => g[1].toUpperCase()); + const isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1); + + Logger.prototype[`is${isLevelMethod}Enabled`] = function () { + return this.isLevelEnabled(level); }; -}; + + Logger.prototype[levelMethod] = 
function () { + /* eslint prefer-rest-params:0 */ + // todo: once node v4 support dropped, use rest parameter instead + const args = Array.from(arguments); + if (this.isLevelEnabled(level)) { + this._log(level, args); + } + }; +} + +levels.levels.forEach(addLevelMethods); + +configuration.addListener(() => { + levels.levels.forEach(addLevelMethods); +}); + +module.exports = Logger; diff --git a/test/tap/configuration-validation-test.js b/test/tap/configuration-validation-test.js index 58c4250..97660f0 100644 --- a/test/tap/configuration-validation-test.js +++ b/test/tap/configuration-validation-test.js @@ -1,328 +1,333 @@ 'use strict'; const test = require('tap').test; -const Configuration = require('../../lib/configuration'); -const util = require('util'); -const path = require('path'); +// const util = require('util'); +// const path = require('path'); const sandbox = require('sandboxed-module'); +// const log4js = require('../../lib/log4js'); +// const appenders = require('../../lib/appenders'); +// const configuration = require('../../lib/configuration'); +const debug = require('debug')('log4js:test.configuration-validation'); -function testAppender(label) { - return { - configure: function (config, layouts, findAppender) { - return { - configureCalled: true, - type: config.type, - label: label, - config: config, - layouts: layouts, - findAppender: findAppender - }; - } - }; -} +const testAppender = (label, result) => ({ + configure: function (config, layouts, findAppender) { + debug(`testAppender(${label}).configure called`); + result.configureCalled = true; + result.type = config.type; + result.label = label; + result.config = config; + result.layouts = layouts; + result.findAppender = findAppender; + return { }; + } +}); test('log4js configuration validation', (batch) => { - batch.test('should give error if config is just plain silly', (t) => { - [null, undefined, '', ' ', []].forEach((config) => { - const expectedError = - new Error(`Problem with log4js configuration: (${util.inspect(config)}) - must be an object.`); - t.throws( - () => new Configuration(config), - expectedError - ); - }); - - t.end(); - }); - - batch.test('should give error if config is an empty object', (t) => { - const expectedError = - new Error('Problem with log4js configuration: ({}) - must have a property "appenders" of type object.'); - t.throws(() => new Configuration({}), expectedError); - t.end(); - }); - - batch.test('should give error if config has no appenders', (t) => { - const expectedError = - new Error('Problem with log4js configuration: ({ categories: {} }) ' + - '- must have a property "appenders" of type object.'); - t.throws(() => new Configuration({ categories: {} }), expectedError); - t.end(); - }); - - batch.test('should give error if config has no categories', (t) => { - const expectedError = - new Error('Problem with log4js configuration: ({ appenders: {} }) ' + - '- must have a property "categories" of type object.'); - t.throws(() => new Configuration({ appenders: {} }), expectedError); - t.end(); - }); - - batch.test('should give error if appenders is not an object', (t) => { - const error = - new Error('Problem with log4js configuration: ({ appenders: [], categories: [] })' + - ' - must have a property "appenders" of type object.'); - t.throws( - () => new Configuration({ appenders: [], categories: [] }), - error - ); - t.end(); - }); - - batch.test('should give error if appenders are not all valid', (t) => { - const error = - new Error('Problem with log4js configuration: ({ 
appenders: { thing: \'cheese\' }, categories: {} })' + - ' - appender "thing" is not valid (must be an object with property "type")'); - t.throws( - () => new Configuration({ appenders: { thing: 'cheese' }, categories: {} }), - error - ); - t.end(); - }); - - batch.test('should require at least one appender', (t) => { - const error = new Error('Problem with log4js configuration: ({ appenders: {}, categories: {} })' + - ' - must define at least one appender.'); - t.throws( - () => new Configuration({ appenders: {}, categories: {} }), - error - ); - t.end(); - }); - - batch.test('should give error if categories are not all valid', (t) => { - const error = new Error('Problem with log4js configuration: ' + - '({ appenders: { stdout: { type: \'stdout\' } },\n categories: { thing: \'cheese\' } })' + - ' - category "thing" is not valid (must be an object with properties "appenders" and "level")'); - t.throws( - () => new Configuration({ appenders: { stdout: { type: 'stdout' } }, categories: { thing: 'cheese' } }), - error - ); - t.end(); - }); - - batch.test('should give error if default category not defined', (t) => { - const error = new Error('Problem with log4js configuration: ' + - '({ appenders: { stdout: { type: \'stdout\' } },\n' + - ' categories: { thing: { appenders: [ \'stdout\' ], level: \'ERROR\' } } })' + - ' - must define a "default" category.'); - t.throws( - () => new Configuration({ - appenders: { stdout: { type: 'stdout' } }, - categories: { thing: { appenders: ['stdout'], level: 'ERROR' } } - }), - error - ); - t.end(); - }); - - batch.test('should require at least one category', (t) => { - const error = - new Error('Problem with log4js configuration: ({ appenders: { stdout: { type: \'stdout\' } }, categories: {} })' + - ' - must define at least one category.'); - t.throws( - () => new Configuration({ appenders: { stdout: { type: 'stdout' } }, categories: {} }), - error - ); - t.end(); - }); - - batch.test('should give error if category.appenders is not an array', (t) => { - const error = new Error('Problem with log4js configuration: ' + - '({ appenders: { stdout: { type: \'stdout\' } },\n' + - ' categories: { thing: { appenders: {}, level: \'ERROR\' } } })' + - ' - category "thing" is not valid (appenders must be an array of appender names)'); - t.throws( - () => new Configuration({ - appenders: { stdout: { type: 'stdout' } }, - categories: { thing: { appenders: {}, level: 'ERROR' } } - }), - error - ); - t.end(); - }); - - batch.test('should give error if category.appenders is empty', (t) => { - const error = new Error('Problem with log4js configuration: ' + - '({ appenders: { stdout: { type: \'stdout\' } },\n' + - ' categories: { thing: { appenders: [], level: \'ERROR\' } } })' + - ' - category "thing" is not valid (appenders must contain at least one appender name)'); - t.throws( - () => new Configuration({ - appenders: { stdout: { type: 'stdout' } }, - categories: { thing: { appenders: [], level: 'ERROR' } } - }), - error - ); - t.end(); - }); - - batch.test('should give error if categories do not refer to valid appenders', (t) => { - const error = new Error('Problem with log4js configuration: ' + - '({ appenders: { stdout: { type: \'stdout\' } },\n' + - ' categories: { thing: { appenders: [ \'cheese\' ], level: \'ERROR\' } } })' + - ' - category "thing" is not valid (appender "cheese" is not defined)'); - t.throws( - () => new Configuration({ - appenders: { stdout: { type: 'stdout' } }, - categories: { thing: { appenders: ['cheese'], level: 'ERROR' } } - }), - error - ); 
- t.end(); - }); - - batch.test('should give error if category level is not valid', (t) => { - const error = new Error('Problem with log4js configuration: ' + - '({ appenders: { stdout: { type: \'stdout\' } },\n' + - ' categories: { default: { appenders: [ \'stdout\' ], level: \'Biscuits\' } } })' + - ' - category "default" is not valid (level "Biscuits" not recognised; ' + - 'valid levels are ALL, TRACE, DEBUG, INFO, WARN, ERROR, FATAL, MARK, OFF)'); - t.throws( - () => new Configuration({ - appenders: { stdout: { type: 'stdout' } }, - categories: { default: { appenders: ['stdout'], level: 'Biscuits' } } - }), - error - ); - t.end(); - }); - - batch.test('should give error if appender type cannot be found', (t) => { - const error = new Error('Problem with log4js configuration: ' + - '({ appenders: { thing: { type: \'cheese\' } },\n' + - ' categories: { default: { appenders: [ \'thing\' ], level: \'ERROR\' } } })' + - ' - appender "thing" is not valid (type "cheese" could not be found)'); - t.throws( - () => new Configuration({ - appenders: { thing: { type: 'cheese' } }, - categories: { default: { appenders: ['thing'], level: 'ERROR' } } - }), - error - ); - t.end(); - }); +// batch.test('should give error if config is just plain silly', (t) => { +// [null, undefined, '', ' ', []].forEach((config) => { +// const expectedError = +// new Error(`Problem with log4js configuration: (${util.inspect(config)}) - must be an object.`); +// t.throws( +// () => configuration.configure(config), +// expectedError +// ); +// }); +// +// t.end(); +// }); +// +// batch.test('should give error if config is an empty object', (t) => { +// const expectedError = +// new Error('Problem with log4js configuration: ({}) - must have a property "appenders" of type object.'); +// t.throws(() => log4js.configure({}), expectedError); +// t.end(); +// }); +// +// batch.test('should give error if config has no appenders', (t) => { +// const expectedError = +// new Error('Problem with log4js configuration: ({ categories: {} }) ' + +// '- must have a property "appenders" of type object.'); +// t.throws(() => log4js.configure({ categories: {} }), expectedError); +// t.end(); +// }); +// +// batch.test('should give error if config has no categories', (t) => { +// const expectedError = +// new Error('Problem with log4js configuration: ({ appenders: { out: { type: \'stdout\' } } }) ' + +// '- must have a property "categories" of type object.'); +// t.throws(() => log4js.configure({ appenders: { out: { type: 'stdout' } } }), expectedError); +// t.end(); +// }); +// +// batch.test('should give error if appenders is not an object', (t) => { +// const error = +// new Error('Problem with log4js configuration: ({ appenders: [], categories: [] })' + +// ' - must have a property "appenders" of type object.'); +// t.throws( +// () => log4js.configure({ appenders: [], categories: [] }), +// error +// ); +// t.end(); +// }); +// +// batch.test('should give error if appenders are not all valid', (t) => { +// const error = +// new Error('Problem with log4js configuration: ({ appenders: { thing: \'cheese\' }, categories: {} })' + +// ' - appender "thing" is not valid (must be an object with property "type")'); +// t.throws( +// () => log4js.configure({ appenders: { thing: 'cheese' }, categories: {} }), +// error +// ); +// t.end(); +// }); +// +// batch.test('should require at least one appender', (t) => { +// const error = new Error('Problem with log4js configuration: ({ appenders: {}, categories: {} })' + +// ' - must define at least one 
appender.'); +// t.throws( +// () => log4js.configure({ appenders: {}, categories: {} }), +// error +// ); +// t.end(); +// }); +// +// batch.test('should give error if categories are not all valid', (t) => { +// const error = new Error('Problem with log4js configuration: ' + +// '({ appenders: { stdout: { type: \'stdout\' } },\n categories: { thing: \'cheese\' } })' + +// ' - category "thing" is not valid (must be an object with properties "appenders" and "level")'); +// t.throws( +// () => log4js.configure({ appenders: { stdout: { type: 'stdout' } }, categories: { thing: 'cheese' } }), +// error +// ); +// t.end(); +// }); +// +// batch.test('should give error if default category not defined', (t) => { +// const error = new Error('Problem with log4js configuration: ' + +// '({ appenders: { stdout: { type: \'stdout\' } },\n' + +// ' categories: { thing: { appenders: [ \'stdout\' ], level: \'ERROR\' } } })' + +// ' - must define a "default" category.'); +// t.throws( +// () => log4js.configure({ +// appenders: { stdout: { type: 'stdout' } }, +// categories: { thing: { appenders: ['stdout'], level: 'ERROR' } } +// }), +// error +// ); +// t.end(); +// }); +// +// batch.test('should require at least one category', (t) => { +// const error = +// new Error('Problem with log4js configuration: ({ appenders: { stdout: { type: \'stdout\' } }, categories: {} })' + +// ' - must define at least one category.'); +// t.throws( +// () => log4js.configure({ appenders: { stdout: { type: 'stdout' } }, categories: {} }), +// error +// ); +// t.end(); +// }); +// +// batch.test('should give error if category.appenders is not an array', (t) => { +// const error = new Error('Problem with log4js configuration: ' + +// '({ appenders: { stdout: { type: \'stdout\' } },\n' + +// ' categories: { thing: { appenders: {}, level: \'ERROR\' } } })' + +// ' - category "thing" is not valid (appenders must be an array of appender names)'); +// t.throws( +// () => log4js.configure({ +// appenders: { stdout: { type: 'stdout' } }, +// categories: { thing: { appenders: {}, level: 'ERROR' } } +// }), +// error +// ); +// t.end(); +// }); +// +// batch.test('should give error if category.appenders is empty', (t) => { +// const error = new Error('Problem with log4js configuration: ' + +// '({ appenders: { stdout: { type: \'stdout\' } },\n' + +// ' categories: { thing: { appenders: [], level: \'ERROR\' } } })' + +// ' - category "thing" is not valid (appenders must contain at least one appender name)'); +// t.throws( +// () => log4js.configure({ +// appenders: { stdout: { type: 'stdout' } }, +// categories: { thing: { appenders: [], level: 'ERROR' } } +// }), +// error +// ); +// t.end(); +// }); +// +// batch.test('should give error if categories do not refer to valid appenders', (t) => { +// const error = new Error('Problem with log4js configuration: ' + +// '({ appenders: { stdout: { type: \'stdout\' } },\n' + +// ' categories: { thing: { appenders: [ \'cheese\' ], level: \'ERROR\' } } })' + +// ' - category "thing" is not valid (appender "cheese" is not defined)'); +// t.throws( +// () => log4js.configure({ +// appenders: { stdout: { type: 'stdout' } }, +// categories: { thing: { appenders: ['cheese'], level: 'ERROR' } } +// }), +// error +// ); +// t.end(); +// }); +// +// batch.test('should give error if category level is not valid', (t) => { +// const error = new Error('Problem with log4js configuration: ' + +// '({ appenders: { stdout: { type: \'stdout\' } },\n' + +// ' categories: { default: { appenders: [ \'stdout\' ], 
level: \'Biscuits\' } } })' + +// ' - category "default" is not valid (level "Biscuits" not recognised; ' + +// 'valid levels are ALL, TRACE, DEBUG, INFO, WARN, ERROR, FATAL, MARK, OFF)'); +// t.throws( +// () => log4js.configure({ +// appenders: { stdout: { type: 'stdout' } }, +// categories: { default: { appenders: ['stdout'], level: 'Biscuits' } } +// }), +// error +// ); +// t.end(); +// }); +// +// batch.test('should give error if appender type cannot be found', (t) => { +// const error = new Error('Problem with log4js configuration: ' + +// '({ appenders: { thing: { type: \'cheese\' } },\n' + +// ' categories: { default: { appenders: [ \'thing\' ], level: \'ERROR\' } } })' + +// ' - appender "thing" is not valid (type "cheese" could not be found)'); +// t.throws( +// () => log4js.configure({ +// appenders: { thing: { type: 'cheese' } }, +// categories: { default: { appenders: ['thing'], level: 'ERROR' } } +// }), +// error +// ); +// t.end(); +// }); batch.test('should create appender instances', (t) => { - const SandboxedConfiguration = sandbox.require( - '../../lib/configuration', + const thing = {}; + const sandboxedLog4js = sandbox.require( + '../../lib/log4js', { singleOnly: true, requires: { - cheese: testAppender('cheesy') + cheese: testAppender('cheesy', thing) } } ); - const config = new SandboxedConfiguration({ + sandboxedLog4js.configure({ appenders: { thing: { type: 'cheese' } }, categories: { default: { appenders: ['thing'], level: 'ERROR' } } }); - const thing = config.appenders.get('thing'); t.ok(thing.configureCalled); t.equal(thing.type, 'cheese'); t.end(); }); - batch.test('should load appenders from core first', (t) => { - const SandboxedConfiguration = sandbox.require( - '../../lib/configuration', - { - singleOnly: true, - requires: { - './appenders/cheese': testAppender('correct'), - cheese: testAppender('wrong') - } - } - ); - - const config = new SandboxedConfiguration({ - appenders: { thing: { type: 'cheese' } }, - categories: { default: { appenders: ['thing'], level: 'ERROR' } } - }); - - const thing = config.appenders.get('thing'); - t.ok(thing.configureCalled); - t.equal(thing.type, 'cheese'); - t.equal(thing.label, 'correct'); - t.end(); - }); - - batch.test('should load appenders relative to main file if not in core, or node_modules', (t) => { - const mainPath = path.dirname(require.main.filename); - const sandboxConfig = { singleOnly: true, requires: {} }; - sandboxConfig.requires[`${mainPath}/cheese`] = testAppender('correct'); - // add this one, because when we're running coverage the main path is a bit different - sandboxConfig.requires[ - `${path.join(mainPath, '../../node_modules/tap/node_modules/nyc/bin/cheese')}` - ] = testAppender('correct'); - const SandboxedConfiguration = sandbox.require('../../lib/configuration', sandboxConfig); - - const config = new SandboxedConfiguration({ - appenders: { thing: { type: 'cheese' } }, - categories: { default: { appenders: ['thing'], level: 'ERROR' } } - }); - - const thing = config.appenders.get('thing'); - t.ok(thing.configureCalled); - t.equal(thing.type, 'cheese'); - t.equal(thing.label, 'correct'); - t.end(); - }); - - batch.test('should load appenders relative to process.cwd if not found in core, node_modules', (t) => { - const SandboxedConfiguration = sandbox.require( - '../../lib/configuration', - { - singleOnly: true, - requires: { - '/var/lib/cheese/cheese': testAppender('correct'), - }, - globals: { - process: { cwd: () => '/var/lib/cheese', env: {} } - } - } - ); - - const config = new 
SandboxedConfiguration({ - appenders: { thing: { type: 'cheese' } }, - categories: { default: { appenders: ['thing'], level: 'ERROR' } } - }); - - const thing = config.appenders.get('thing'); - t.ok(thing.configureCalled); - t.equal(thing.type, 'cheese'); - t.equal(thing.label, 'correct'); - t.end(); - }); - - batch.test('should pass config, layout, findAppender to appenders', (t) => { - const SandboxedConfiguration = sandbox.require( - '../../lib/configuration', - { - singleOnly: true, - requires: { - cheese: testAppender('cheesy') - } - } - ); - - const config = new SandboxedConfiguration({ - appenders: { thing: { type: 'cheese', foo: 'bar' }, thing2: { type: 'cheese' } }, - categories: { default: { appenders: ['thing'], level: 'ERROR' } } - }); - - const thing = config.appenders.get('thing'); - t.ok(thing.configureCalled); - t.equal(thing.type, 'cheese'); - t.equal(thing.config.foo, 'bar'); - t.type(thing.layouts, 'object'); - t.type(thing.layouts.basicLayout, 'function'); - t.type(thing.findAppender, 'function'); - t.type(thing.findAppender('thing2'), 'object'); - t.end(); - }); + // batch.test('should load appenders from core first', (t) => { + // const sandboxedLog4js = sandbox.require( + // '../../lib/log4js', + // { + // singleOnly: true, + // requires: { + // './cheese': testAppender('correct'), + // cheese: testAppender('wrong') + // } + // } + // ); + // + // sandboxedLog4js.configure({ + // appenders: { thing: { type: 'cheese' } }, + // categories: { default: { appenders: ['thing'], level: 'ERROR' } } + // }); + // + // const thing = appenders.get('thing'); + // t.ok(thing.configureCalled); + // t.equal(thing.type, 'cheese'); + // t.equal(thing.label, 'correct'); + // t.end(); + // }); + // + // batch.test('should load appenders relative to main file if not in core, or node_modules', (t) => { + // const mainPath = path.dirname(require.main.filename); + // const sandboxConfig = { + // singleOnly: true, + // requires: {} + // }; + // sandboxConfig.requires[`${mainPath}/cheese`] = testAppender('correct'); + // // add this one, because when we're running coverage the main path is a bit different + // sandboxConfig.requires[ + // `${path.join(mainPath, '../../node_modules/tap/node_modules/nyc/bin/cheese')}` + // ] = testAppender('correct'); + // const sandboxedLog4js = sandbox.require('../../lib/log4js', sandboxConfig); + // + // sandboxedLog4js.configure({ + // appenders: { thing: { type: 'cheese' } }, + // categories: { default: { appenders: ['thing'], level: 'ERROR' } } + // }); + // + // const thing = appenders.get('thing'); + // t.ok(thing.configureCalled); + // t.equal(thing.type, 'cheese'); + // t.equal(thing.label, 'correct'); + // t.end(); + // }); + // + // batch.test('should load appenders relative to process.cwd if not found in core, node_modules', (t) => { + // const sandboxedLog4js = sandbox.require( + // '../../lib/log4js', + // { + // singleOnly: true, + // requires: { + // '/var/lib/cheese/cheese': testAppender('correct'), + // }, + // globals: { + // process: { cwd: () => '/var/lib/cheese', env: {} } + // } + // } + // ); + // + // sandboxedLog4js.configure({ + // appenders: { thing: { type: 'cheese' } }, + // categories: { default: { appenders: ['thing'], level: 'ERROR' } } + // }); + // + // const thing = appenders.get('thing'); + // t.ok(thing.configureCalled); + // t.equal(thing.type, 'cheese'); + // t.equal(thing.label, 'correct'); + // t.end(); + // }); + // + // batch.test('should pass config, layout, findAppender to appenders', (t) => { + // const 
sandboxedLog4js = sandbox.require( + // '../../lib/log4js', + // { + // singleOnly: true, + // requires: { + // './appenders': appenders, + // cheese: testAppender('cheesy') + // } + // } + // ); + // + // sandboxedLog4js.configure({ + // appenders: { thing: { type: 'cheese', foo: 'bar' }, thing2: { type: 'cheese' } }, + // categories: { default: { appenders: ['thing'], level: 'ERROR' } } + // }); + // + // const thing = appenders.get('thing'); + // t.ok(thing.configureCalled); + // t.equal(thing.type, 'cheese'); + // t.equal(thing.config.foo, 'bar'); + // t.type(thing.layouts, 'object'); + // t.type(thing.layouts.basicLayout, 'function'); + // t.type(thing.findAppender, 'function'); + // t.type(thing.findAppender('thing2'), 'object'); + // t.end(); + // }); batch.end(); }); diff --git a/test/tap/connect-logger-test.js b/test/tap/connect-logger-test.js index 6c79d07..5c61b99 100644 --- a/test/tap/connect-logger-test.js +++ b/test/tap/connect-logger-test.js @@ -4,7 +4,7 @@ const test = require('tap').test; const EE = require('events').EventEmitter; -const levels = require('../../lib/levels')(); +const levels = require('../../lib/levels'); class MockLogger { constructor() { @@ -58,7 +58,7 @@ function request(cl, method, url, code, reqHeaders, resHeaders) { } test('log4js connect logger', (batch) => { - const clm = require('../../lib/connect-logger')(levels); + const clm = require('../../lib/connect-logger'); batch.test('getConnectLoggerModule', (t) => { t.type(clm, 'object', 'should return a connect logger factory'); diff --git a/test/tap/connect-nolog-test.js b/test/tap/connect-nolog-test.js index 5404c34..8d3370d 100644 --- a/test/tap/connect-nolog-test.js +++ b/test/tap/connect-nolog-test.js @@ -2,7 +2,7 @@ const test = require('tap').test; const EE = require('events').EventEmitter; -const levels = require('../../lib/levels')(); +const levels = require('../../lib/levels'); class MockLogger { constructor() { @@ -41,7 +41,7 @@ class MockResponse extends EE { } test('log4js connect logger', (batch) => { - const clm = require('../../lib/connect-logger')(levels); + const clm = require('../../lib/connect-logger'); batch.test('with nolog config', (t) => { const ml = new MockLogger(); diff --git a/test/tap/levels-test.js b/test/tap/levels-test.js index 6815da3..13e13ce 100644 --- a/test/tap/levels-test.js +++ b/test/tap/levels-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const levels = require('../../lib/levels')(); +const levels = require('../../lib/levels'); function assertThat(assert, level) { function assertForEach(assertion, testFn, otherLevels) { diff --git a/test/tap/logger-test.js b/test/tap/logger-test.js index c4ff6ed..6dd35df 100644 --- a/test/tap/logger-test.js +++ b/test/tap/logger-test.js @@ -1,26 +1,19 @@ 'use strict'; const test = require('tap').test; -const levels = require('../../lib/levels')(); +const levels = require('../../lib/levels'); +const Logger = require('../../lib/logger'); const testConfig = { level: levels.TRACE }; -const loggerModule = require('../../lib/logger')( - levels, - () => testConfig.level, - (category, level) => { testConfig.level = level; } -); - -const Logger = loggerModule.Logger; const testDispatcher = { events: [], dispatch: function (evt) { this.events.push(evt); } }; -const dispatch = testDispatcher.dispatch.bind(testDispatcher); test('../../lib/logger', (batch) => { batch.beforeEach((done) => { @@ -32,28 +25,20 @@ test('../../lib/logger', (batch) => { batch.test('constructor with no parameters', (t) => { t.throws( () => 
new Logger(), - new Error('No dispatch function provided.') - ); - t.end(); - }); - - batch.test('constructor with only dispatch', (t) => { - t.throws( - () => new Logger(dispatch), new Error('No category provided.') ); t.end(); }); batch.test('constructor with category', (t) => { - const logger = new Logger(dispatch, 'cheese'); + const logger = new Logger('cheese'); t.equal(logger.category, 'cheese', 'should use category'); t.equal(logger.level, levels.TRACE, 'should use TRACE log level'); t.end(); }); batch.test('set level should delegate', (t) => { - const logger = new Logger(dispatch, 'cheese'); + const logger = new Logger('cheese'); logger.level = 'debug'; t.equal(logger.category, 'cheese', 'should use category'); t.equal(logger.level, levels.DEBUG, 'should use level'); @@ -61,7 +46,7 @@ test('../../lib/logger', (batch) => { }); batch.test('isLevelEnabled', (t) => { - const logger = new Logger(dispatch, 'cheese'); + const logger = new Logger('cheese'); const functions = [ 'isTraceEnabled', 'isDebugEnabled', 'isInfoEnabled', 'isWarnEnabled', 'isErrorEnabled', 'isFatalEnabled' @@ -83,7 +68,7 @@ test('../../lib/logger', (batch) => { }); batch.test('should send log events to dispatch function', (t) => { - const logger = new Logger(dispatch, 'cheese'); + const logger = new Logger('cheese'); logger.debug('Event 1'); logger.debug('Event 2'); logger.debug('Event 3'); @@ -97,7 +82,7 @@ test('../../lib/logger', (batch) => { }); batch.test('should add context values to every event', (t) => { - const logger = new Logger(dispatch, 'fromage'); + const logger = new Logger('fromage'); logger.debug('Event 1'); logger.addContext('cheese', 'edam'); logger.debug('Event 2'); @@ -121,7 +106,7 @@ test('../../lib/logger', (batch) => { }); batch.test('should not break when log data has no toString', (t) => { - const logger = new Logger(dispatch, 'thing'); + const logger = new Logger('thing'); logger.info('Just testing ', Object.create(null)); const events = testDispatcher.events; diff --git a/test/tap/server-test.js b/test/tap/server-test.js index 77c2c9b..fb7bc5a 100644 --- a/test/tap/server-test.js +++ b/test/tap/server-test.js @@ -2,8 +2,8 @@ const test = require('tap').test; const net = require('net'); const log4js = require('../../lib/log4js'); const vcr = require('../../lib/appenders/recording'); -const levels = require('../../lib/levels')(); -const LoggingEvent = (require('../../lib/logger')(levels)).LoggingEvent; +const levels = require('../../lib/levels'); +const LoggingEvent = require('../../lib/LoggingEvent'); log4js.configure({ appenders: { From 5f8fc128690839bef4be61df696fbab5a74a204d Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Tue, 30 Jan 2018 08:52:49 +1100 Subject: [PATCH 05/34] fix(test): removed node 4, using proxy in test --- .travis.yml | 2 - lib/configuration.js | 3 +- package.json | 4 +- test/tap/configuration-validation-test.js | 590 +++++++++++----------- 4 files changed, 304 insertions(+), 295 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1409c12..cd584de 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,7 +5,5 @@ node_js: - "8" - "7" - "6" - - "5" - - "4" after_success: - npm run codecov diff --git a/lib/configuration.js b/lib/configuration.js index b798cf3..0a9e88b 100644 --- a/lib/configuration.js +++ b/lib/configuration.js @@ -16,6 +16,7 @@ const anInteger = thing => thing && typeof thing === 'number' && Number.isIntege const addListener = (fn) => { if (fn) { listeners.push(fn); + debug(`Added listener, listeners now ${listeners.length}`); } }; @@ -33,7 +34,7 @@ 
const configure = (candidate) => { debug('New configuration to be validated: ', candidate); throwExceptionIf(candidate, not(anObject(candidate)), 'must be an object.'); - debug('Calling configuration listeners'); + debug(`Calling configuration listeners (${listeners.length})`); listeners.forEach(listener => listener(candidate)); debug('Configuration finished.'); }; diff --git a/package.json b/package.json index 616f4e0..1bc7919 100644 --- a/package.json +++ b/package.json @@ -24,7 +24,7 @@ "url": "http://github.com/log4js-node/log4js-node/issues" }, "engines": { - "node": ">=4.0" + "node": ">=6.0" }, "scripts": { "clean": "find test -type f ! -name '*.json' ! -name '*.js' ! -name '.eslintrc' -delete && rm *.log", @@ -56,7 +56,7 @@ "eslint-plugin-import": "^2.8.0", "husky": "^0.14.3", "nyc": "^11.3.0", - "sandboxed-module": "^2.0.3", + "@log4js-node/sandboxed-module": "^2.1.1", "tap": "^10.7.3", "validate-commit-msg": "^2.14.0" }, diff --git a/test/tap/configuration-validation-test.js b/test/tap/configuration-validation-test.js index 97660f0..00e08dc 100644 --- a/test/tap/configuration-validation-test.js +++ b/test/tap/configuration-validation-test.js @@ -1,12 +1,11 @@ 'use strict'; const test = require('tap').test; -// const util = require('util'); -// const path = require('path'); -const sandbox = require('sandboxed-module'); -// const log4js = require('../../lib/log4js'); -// const appenders = require('../../lib/appenders'); -// const configuration = require('../../lib/configuration'); +const util = require('util'); +const path = require('path'); +const sandbox = require('@log4js-node/sandboxed-module'); +const log4js = require('../../lib/log4js'); +const configuration = require('../../lib/configuration'); const debug = require('debug')('log4js:test.configuration-validation'); const testAppender = (label, result) => ({ @@ -23,196 +22,196 @@ const testAppender = (label, result) => ({ }); test('log4js configuration validation', (batch) => { -// batch.test('should give error if config is just plain silly', (t) => { -// [null, undefined, '', ' ', []].forEach((config) => { -// const expectedError = -// new Error(`Problem with log4js configuration: (${util.inspect(config)}) - must be an object.`); -// t.throws( -// () => configuration.configure(config), -// expectedError -// ); -// }); -// -// t.end(); -// }); -// -// batch.test('should give error if config is an empty object', (t) => { -// const expectedError = -// new Error('Problem with log4js configuration: ({}) - must have a property "appenders" of type object.'); -// t.throws(() => log4js.configure({}), expectedError); -// t.end(); -// }); -// -// batch.test('should give error if config has no appenders', (t) => { -// const expectedError = -// new Error('Problem with log4js configuration: ({ categories: {} }) ' + -// '- must have a property "appenders" of type object.'); -// t.throws(() => log4js.configure({ categories: {} }), expectedError); -// t.end(); -// }); -// -// batch.test('should give error if config has no categories', (t) => { -// const expectedError = -// new Error('Problem with log4js configuration: ({ appenders: { out: { type: \'stdout\' } } }) ' + -// '- must have a property "categories" of type object.'); -// t.throws(() => log4js.configure({ appenders: { out: { type: 'stdout' } } }), expectedError); -// t.end(); -// }); -// -// batch.test('should give error if appenders is not an object', (t) => { -// const error = -// new Error('Problem with log4js configuration: ({ appenders: [], categories: [] })' + -// ' - must have 
a property "appenders" of type object.'); -// t.throws( -// () => log4js.configure({ appenders: [], categories: [] }), -// error -// ); -// t.end(); -// }); -// -// batch.test('should give error if appenders are not all valid', (t) => { -// const error = -// new Error('Problem with log4js configuration: ({ appenders: { thing: \'cheese\' }, categories: {} })' + -// ' - appender "thing" is not valid (must be an object with property "type")'); -// t.throws( -// () => log4js.configure({ appenders: { thing: 'cheese' }, categories: {} }), -// error -// ); -// t.end(); -// }); -// -// batch.test('should require at least one appender', (t) => { -// const error = new Error('Problem with log4js configuration: ({ appenders: {}, categories: {} })' + -// ' - must define at least one appender.'); -// t.throws( -// () => log4js.configure({ appenders: {}, categories: {} }), -// error -// ); -// t.end(); -// }); -// -// batch.test('should give error if categories are not all valid', (t) => { -// const error = new Error('Problem with log4js configuration: ' + -// '({ appenders: { stdout: { type: \'stdout\' } },\n categories: { thing: \'cheese\' } })' + -// ' - category "thing" is not valid (must be an object with properties "appenders" and "level")'); -// t.throws( -// () => log4js.configure({ appenders: { stdout: { type: 'stdout' } }, categories: { thing: 'cheese' } }), -// error -// ); -// t.end(); -// }); -// -// batch.test('should give error if default category not defined', (t) => { -// const error = new Error('Problem with log4js configuration: ' + -// '({ appenders: { stdout: { type: \'stdout\' } },\n' + -// ' categories: { thing: { appenders: [ \'stdout\' ], level: \'ERROR\' } } })' + -// ' - must define a "default" category.'); -// t.throws( -// () => log4js.configure({ -// appenders: { stdout: { type: 'stdout' } }, -// categories: { thing: { appenders: ['stdout'], level: 'ERROR' } } -// }), -// error -// ); -// t.end(); -// }); -// -// batch.test('should require at least one category', (t) => { -// const error = -// new Error('Problem with log4js configuration: ({ appenders: { stdout: { type: \'stdout\' } }, categories: {} })' + -// ' - must define at least one category.'); -// t.throws( -// () => log4js.configure({ appenders: { stdout: { type: 'stdout' } }, categories: {} }), -// error -// ); -// t.end(); -// }); -// -// batch.test('should give error if category.appenders is not an array', (t) => { -// const error = new Error('Problem with log4js configuration: ' + -// '({ appenders: { stdout: { type: \'stdout\' } },\n' + -// ' categories: { thing: { appenders: {}, level: \'ERROR\' } } })' + -// ' - category "thing" is not valid (appenders must be an array of appender names)'); -// t.throws( -// () => log4js.configure({ -// appenders: { stdout: { type: 'stdout' } }, -// categories: { thing: { appenders: {}, level: 'ERROR' } } -// }), -// error -// ); -// t.end(); -// }); -// -// batch.test('should give error if category.appenders is empty', (t) => { -// const error = new Error('Problem with log4js configuration: ' + -// '({ appenders: { stdout: { type: \'stdout\' } },\n' + -// ' categories: { thing: { appenders: [], level: \'ERROR\' } } })' + -// ' - category "thing" is not valid (appenders must contain at least one appender name)'); -// t.throws( -// () => log4js.configure({ -// appenders: { stdout: { type: 'stdout' } }, -// categories: { thing: { appenders: [], level: 'ERROR' } } -// }), -// error -// ); -// t.end(); -// }); -// -// batch.test('should give error if categories do not refer to 
valid appenders', (t) => { -// const error = new Error('Problem with log4js configuration: ' + -// '({ appenders: { stdout: { type: \'stdout\' } },\n' + -// ' categories: { thing: { appenders: [ \'cheese\' ], level: \'ERROR\' } } })' + -// ' - category "thing" is not valid (appender "cheese" is not defined)'); -// t.throws( -// () => log4js.configure({ -// appenders: { stdout: { type: 'stdout' } }, -// categories: { thing: { appenders: ['cheese'], level: 'ERROR' } } -// }), -// error -// ); -// t.end(); -// }); -// -// batch.test('should give error if category level is not valid', (t) => { -// const error = new Error('Problem with log4js configuration: ' + -// '({ appenders: { stdout: { type: \'stdout\' } },\n' + -// ' categories: { default: { appenders: [ \'stdout\' ], level: \'Biscuits\' } } })' + -// ' - category "default" is not valid (level "Biscuits" not recognised; ' + -// 'valid levels are ALL, TRACE, DEBUG, INFO, WARN, ERROR, FATAL, MARK, OFF)'); -// t.throws( -// () => log4js.configure({ -// appenders: { stdout: { type: 'stdout' } }, -// categories: { default: { appenders: ['stdout'], level: 'Biscuits' } } -// }), -// error -// ); -// t.end(); -// }); -// -// batch.test('should give error if appender type cannot be found', (t) => { -// const error = new Error('Problem with log4js configuration: ' + -// '({ appenders: { thing: { type: \'cheese\' } },\n' + -// ' categories: { default: { appenders: [ \'thing\' ], level: \'ERROR\' } } })' + -// ' - appender "thing" is not valid (type "cheese" could not be found)'); -// t.throws( -// () => log4js.configure({ -// appenders: { thing: { type: 'cheese' } }, -// categories: { default: { appenders: ['thing'], level: 'ERROR' } } -// }), -// error -// ); -// t.end(); -// }); + batch.test('should give error if config is just plain silly', (t) => { + [null, undefined, '', ' ', []].forEach((config) => { + const expectedError = + new Error(`Problem with log4js configuration: (${util.inspect(config)}) - must be an object.`); + t.throws( + () => configuration.configure(config), + expectedError + ); + }); + + t.end(); + }); + + batch.test('should give error if config is an empty object', (t) => { + const expectedError = + new Error('Problem with log4js configuration: ({}) - must have a property "appenders" of type object.'); + t.throws(() => log4js.configure({}), expectedError); + t.end(); + }); + + batch.test('should give error if config has no appenders', (t) => { + const expectedError = + new Error('Problem with log4js configuration: ({ categories: {} }) ' + + '- must have a property "appenders" of type object.'); + t.throws(() => log4js.configure({ categories: {} }), expectedError); + t.end(); + }); + + batch.test('should give error if config has no categories', (t) => { + const expectedError = + new Error('Problem with log4js configuration: ({ appenders: { out: { type: \'stdout\' } } }) ' + + '- must have a property "categories" of type object.'); + t.throws(() => log4js.configure({ appenders: { out: { type: 'stdout' } } }), expectedError); + t.end(); + }); + + batch.test('should give error if appenders is not an object', (t) => { + const error = + new Error('Problem with log4js configuration: ({ appenders: [], categories: [] })' + + ' - must have a property "appenders" of type object.'); + t.throws( + () => log4js.configure({ appenders: [], categories: [] }), + error + ); + t.end(); + }); + + batch.test('should give error if appenders are not all valid', (t) => { + const error = + new Error('Problem with log4js configuration: ({ appenders: { 
thing: \'cheese\' }, categories: {} })' + + ' - appender "thing" is not valid (must be an object with property "type")'); + t.throws( + () => log4js.configure({ appenders: { thing: 'cheese' }, categories: {} }), + error + ); + t.end(); + }); + + batch.test('should require at least one appender', (t) => { + const error = new Error('Problem with log4js configuration: ({ appenders: {}, categories: {} })' + + ' - must define at least one appender.'); + t.throws( + () => log4js.configure({ appenders: {}, categories: {} }), + error + ); + t.end(); + }); + + batch.test('should give error if categories are not all valid', (t) => { + const error = new Error('Problem with log4js configuration: ' + + '({ appenders: { stdout: { type: \'stdout\' } },\n categories: { thing: \'cheese\' } })' + + ' - category "thing" is not valid (must be an object with properties "appenders" and "level")'); + t.throws( + () => log4js.configure({ appenders: { stdout: { type: 'stdout' } }, categories: { thing: 'cheese' } }), + error + ); + t.end(); + }); + + batch.test('should give error if default category not defined', (t) => { + const error = new Error('Problem with log4js configuration: ' + + '({ appenders: { stdout: { type: \'stdout\' } },\n' + + ' categories: { thing: { appenders: [ \'stdout\' ], level: \'ERROR\' } } })' + + ' - must define a "default" category.'); + t.throws( + () => log4js.configure({ + appenders: { stdout: { type: 'stdout' } }, + categories: { thing: { appenders: ['stdout'], level: 'ERROR' } } + }), + error + ); + t.end(); + }); + + batch.test('should require at least one category', (t) => { + const error = + new Error('Problem with log4js configuration: ({ appenders: { stdout: { type: \'stdout\' } }, categories: {} })' + + ' - must define at least one category.'); + t.throws( + () => log4js.configure({ appenders: { stdout: { type: 'stdout' } }, categories: {} }), + error + ); + t.end(); + }); + + batch.test('should give error if category.appenders is not an array', (t) => { + const error = new Error('Problem with log4js configuration: ' + + '({ appenders: { stdout: { type: \'stdout\' } },\n' + + ' categories: { thing: { appenders: {}, level: \'ERROR\' } } })' + + ' - category "thing" is not valid (appenders must be an array of appender names)'); + t.throws( + () => log4js.configure({ + appenders: { stdout: { type: 'stdout' } }, + categories: { thing: { appenders: {}, level: 'ERROR' } } + }), + error + ); + t.end(); + }); + + batch.test('should give error if category.appenders is empty', (t) => { + const error = new Error('Problem with log4js configuration: ' + + '({ appenders: { stdout: { type: \'stdout\' } },\n' + + ' categories: { thing: { appenders: [], level: \'ERROR\' } } })' + + ' - category "thing" is not valid (appenders must contain at least one appender name)'); + t.throws( + () => log4js.configure({ + appenders: { stdout: { type: 'stdout' } }, + categories: { thing: { appenders: [], level: 'ERROR' } } + }), + error + ); + t.end(); + }); + + batch.test('should give error if categories do not refer to valid appenders', (t) => { + const error = new Error('Problem with log4js configuration: ' + + '({ appenders: { stdout: { type: \'stdout\' } },\n' + + ' categories: { thing: { appenders: [ \'cheese\' ], level: \'ERROR\' } } })' + + ' - category "thing" is not valid (appender "cheese" is not defined)'); + t.throws( + () => log4js.configure({ + appenders: { stdout: { type: 'stdout' } }, + categories: { thing: { appenders: ['cheese'], level: 'ERROR' } } + }), + error + ); + t.end(); + }); + + 
batch.test('should give error if category level is not valid', (t) => { + const error = new Error('Problem with log4js configuration: ' + + '({ appenders: { stdout: { type: \'stdout\' } },\n' + + ' categories: { default: { appenders: [ \'stdout\' ], level: \'Biscuits\' } } })' + + ' - category "default" is not valid (level "Biscuits" not recognised; ' + + 'valid levels are ALL, TRACE, DEBUG, INFO, WARN, ERROR, FATAL, MARK, OFF)'); + t.throws( + () => log4js.configure({ + appenders: { stdout: { type: 'stdout' } }, + categories: { default: { appenders: ['stdout'], level: 'Biscuits' } } + }), + error + ); + t.end(); + }); + + batch.test('should give error if appender type cannot be found', (t) => { + const error = new Error('Problem with log4js configuration: ' + + '({ appenders: { thing: { type: \'cheese\' } },\n' + + ' categories: { default: { appenders: [ \'thing\' ], level: \'ERROR\' } } })' + + ' - appender "thing" is not valid (type "cheese" could not be found)'); + t.throws( + () => log4js.configure({ + appenders: { thing: { type: 'cheese' } }, + categories: { default: { appenders: ['thing'], level: 'ERROR' } } + }), + error + ); + t.end(); + }); batch.test('should create appender instances', (t) => { const thing = {}; const sandboxedLog4js = sandbox.require( '../../lib/log4js', { - singleOnly: true, requires: { cheese: testAppender('cheesy', thing) - } + }, + ignoreMissing: true } ); @@ -226,108 +225,119 @@ test('log4js configuration validation', (batch) => { t.end(); }); - // batch.test('should load appenders from core first', (t) => { - // const sandboxedLog4js = sandbox.require( - // '../../lib/log4js', - // { - // singleOnly: true, - // requires: { - // './cheese': testAppender('correct'), - // cheese: testAppender('wrong') - // } - // } - // ); - // - // sandboxedLog4js.configure({ - // appenders: { thing: { type: 'cheese' } }, - // categories: { default: { appenders: ['thing'], level: 'ERROR' } } - // }); - // - // const thing = appenders.get('thing'); - // t.ok(thing.configureCalled); - // t.equal(thing.type, 'cheese'); - // t.equal(thing.label, 'correct'); - // t.end(); - // }); - // - // batch.test('should load appenders relative to main file if not in core, or node_modules', (t) => { - // const mainPath = path.dirname(require.main.filename); - // const sandboxConfig = { - // singleOnly: true, - // requires: {} - // }; - // sandboxConfig.requires[`${mainPath}/cheese`] = testAppender('correct'); - // // add this one, because when we're running coverage the main path is a bit different - // sandboxConfig.requires[ - // `${path.join(mainPath, '../../node_modules/tap/node_modules/nyc/bin/cheese')}` - // ] = testAppender('correct'); - // const sandboxedLog4js = sandbox.require('../../lib/log4js', sandboxConfig); - // - // sandboxedLog4js.configure({ - // appenders: { thing: { type: 'cheese' } }, - // categories: { default: { appenders: ['thing'], level: 'ERROR' } } - // }); - // - // const thing = appenders.get('thing'); - // t.ok(thing.configureCalled); - // t.equal(thing.type, 'cheese'); - // t.equal(thing.label, 'correct'); - // t.end(); - // }); - // - // batch.test('should load appenders relative to process.cwd if not found in core, node_modules', (t) => { - // const sandboxedLog4js = sandbox.require( - // '../../lib/log4js', - // { - // singleOnly: true, - // requires: { - // '/var/lib/cheese/cheese': testAppender('correct'), - // }, - // globals: { - // process: { cwd: () => '/var/lib/cheese', env: {} } - // } - // } - // ); - // - // sandboxedLog4js.configure({ - // 
appenders: { thing: { type: 'cheese' } }, - // categories: { default: { appenders: ['thing'], level: 'ERROR' } } - // }); - // - // const thing = appenders.get('thing'); - // t.ok(thing.configureCalled); - // t.equal(thing.type, 'cheese'); - // t.equal(thing.label, 'correct'); - // t.end(); - // }); - // - // batch.test('should pass config, layout, findAppender to appenders', (t) => { - // const sandboxedLog4js = sandbox.require( - // '../../lib/log4js', - // { - // singleOnly: true, - // requires: { - // './appenders': appenders, - // cheese: testAppender('cheesy') - // } - // } - // ); - // - // sandboxedLog4js.configure({ - // appenders: { thing: { type: 'cheese', foo: 'bar' }, thing2: { type: 'cheese' } }, - // categories: { default: { appenders: ['thing'], level: 'ERROR' } } - // }); - // - // const thing = appenders.get('thing'); - // t.ok(thing.configureCalled); - // t.equal(thing.type, 'cheese'); - // t.equal(thing.config.foo, 'bar'); - // t.type(thing.layouts, 'object'); - // t.type(thing.layouts.basicLayout, 'function'); - // t.type(thing.findAppender, 'function'); - // t.type(thing.findAppender('thing2'), 'object'); - // t.end(); - // }); + batch.test('should load appenders from core first', (t) => { + const result = {}; + const sandboxedLog4js = sandbox.require( + '../../lib/log4js', + { + requires: { + './cheese': testAppender('correct', result), + cheese: testAppender('wrong', result) + }, + ignoreMissing: true + } + ); + + sandboxedLog4js.configure({ + appenders: { thing: { type: 'cheese' } }, + categories: { default: { appenders: ['thing'], level: 'ERROR' } } + }); + + t.ok(result.configureCalled); + t.equal(result.type, 'cheese'); + t.equal(result.label, 'correct'); + t.end(); + }); + + batch.test('should load appenders relative to main file if not in core, or node_modules', (t) => { + const result = {}; + const mainPath = path.dirname(require.main.filename); + const sandboxConfig = { + ignoreMissing: true, + requires: {} + }; + sandboxConfig.requires[`${mainPath}/cheese`] = testAppender('correct', result); + // add this one, because when we're running coverage the main path is a bit different + sandboxConfig.requires[ + `${path.join(mainPath, '../../node_modules/tap/node_modules/nyc/bin/cheese')}` + ] = testAppender('correct', result); + const sandboxedLog4js = sandbox.require('../../lib/log4js', sandboxConfig); + + sandboxedLog4js.configure({ + appenders: { thing: { type: 'cheese' } }, + categories: { default: { appenders: ['thing'], level: 'ERROR' } } + }); + + t.ok(result.configureCalled); + t.equal(result.type, 'cheese'); + t.equal(result.label, 'correct'); + t.end(); + }); + + batch.test('should load appenders relative to process.cwd if not found in core, node_modules', (t) => { + const result = {}; + const fakeProcess = new Proxy(process, { + get(target, key) { + if (key === 'cwd') { + return () => '/var/lib/cheese'; + } + + return target[key]; + } + }); + const sandboxedLog4js = sandbox.require( + '../../lib/log4js', + { + ignoreMissing: true, + requires: { + '/var/lib/cheese/cheese': testAppender('correct', result), + }, + globals: { + process: fakeProcess + } + } + ); + + sandboxedLog4js.configure({ + appenders: { thing: { type: 'cheese' } }, + categories: { default: { appenders: ['thing'], level: 'ERROR' } } + }); + + t.ok(result.configureCalled); + t.equal(result.type, 'cheese'); + t.equal(result.label, 'correct'); + t.end(); + }); + + batch.test('should pass config, layout, findAppender to appenders', (t) => { + const result = {}; + const sandboxedLog4js = 
sandbox.require( + '../../lib/log4js', + { + ignoreMissing: true, + requires: { + cheese: testAppender('cheesy', result), + notCheese: testAppender('notCheesy', {}) + } + } + ); + + sandboxedLog4js.configure({ + appenders: { thing: { type: 'cheese', foo: 'bar' }, thing2: { type: 'notCheese' } }, + categories: { default: { appenders: ['thing'], level: 'ERROR' } } + }); + + t.ok(result.configureCalled); + t.equal(result.type, 'cheese'); + t.equal(result.config.foo, 'bar'); + t.type(result.layouts, 'object'); + t.type(result.layouts.basicLayout, 'function'); + t.type(result.findAppender, 'function'); + t.type(result.findAppender('thing2'), 'object'); + debug(`thing2: ${util.inspect(result.findAppender('thing2'))}`); + t.type(result.findAppender('thing2').type, 'notCheese'); + t.end(); + }); batch.end(); }); From 8f7fde9da0b3db775328a055cf9505e48af35444 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 1 Feb 2018 07:56:49 +1100 Subject: [PATCH 06/34] fix(test): sandboxed-module -> log4js-node version --- test/sandbox-coverage.js | 2 +- test/tap/configuration-test.js | 2 +- test/tap/configuration-validation-test.js | 4 +--- test/tap/consoleAppender-test.js | 2 +- test/tap/default-settings-test.js | 2 +- test/tap/file-sighup-test.js | 2 +- test/tap/fileAppender-test.js | 2 +- test/tap/gelfAppender-test.js | 2 +- test/tap/hipchatAppender-test.js | 2 +- test/tap/logFaces-HTTP-test.js | 2 +- test/tap/logFaces-UDP-test.js | 2 +- test/tap/logging-test.js | 2 +- test/tap/logglyAppender-test.js | 2 +- test/tap/logstashHTTP-test.js | 2 +- test/tap/logstashUDP-test.js | 2 +- test/tap/mailgunAppender-test.js | 2 +- test/tap/multiprocess-shutdown-test.js | 2 +- test/tap/multiprocess-test.js | 2 +- test/tap/rabbitmqAppender-test.js | 2 +- test/tap/redisAppender-test.js | 2 +- test/tap/slackAppender-test.js | 2 +- test/tap/smtpAppender-test.js | 2 +- test/tap/stderrAppender-test.js | 2 +- test/tap/stdoutAppender-test.js | 2 +- 24 files changed, 24 insertions(+), 26 deletions(-) diff --git a/test/sandbox-coverage.js b/test/sandbox-coverage.js index 8be97c8..7604fab 100644 --- a/test/sandbox-coverage.js +++ b/test/sandbox-coverage.js @@ -1,6 +1,6 @@ 'use strict'; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); sandbox.configure({ sourceTransformers: { diff --git a/test/tap/configuration-test.js b/test/tap/configuration-test.js index 5b0576d..feac5c1 100644 --- a/test/tap/configuration-test.js +++ b/test/tap/configuration-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); test('log4js configure', (batch) => { batch.test('when configuration file loaded via LOG4JS_CONFIG env variable', (t) => { diff --git a/test/tap/configuration-validation-test.js b/test/tap/configuration-validation-test.js index 00e08dc..c1b0a4b 100644 --- a/test/tap/configuration-validation-test.js +++ b/test/tap/configuration-validation-test.js @@ -10,7 +10,7 @@ const debug = require('debug')('log4js:test.configuration-validation'); const testAppender = (label, result) => ({ configure: function (config, layouts, findAppender) { - debug(`testAppender(${label}).configure called`); + debug(`testAppender(${label}).configure called, with config: ${util.inspect(config)}`); result.configureCalled = true; result.type = config.type; result.label = label; @@ -334,8 +334,6 @@ test('log4js configuration validation', (batch) => { t.type(result.layouts.basicLayout, 
'function'); t.type(result.findAppender, 'function'); t.type(result.findAppender('thing2'), 'object'); - debug(`thing2: ${util.inspect(result.findAppender('thing2'))}`); - t.type(result.findAppender('thing2').type, 'notCheese'); t.end(); }); diff --git a/test/tap/consoleAppender-test.js b/test/tap/consoleAppender-test.js index 499f2d3..50293c9 100644 --- a/test/tap/consoleAppender-test.js +++ b/test/tap/consoleAppender-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); test('log4js console appender', (batch) => { batch.test('should output to console', (t) => { diff --git a/test/tap/default-settings-test.js b/test/tap/default-settings-test.js index ee1c275..3538721 100644 --- a/test/tap/default-settings-test.js +++ b/test/tap/default-settings-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); test('default settings', (t) => { const output = []; diff --git a/test/tap/file-sighup-test.js b/test/tap/file-sighup-test.js index f863851..7448ae6 100644 --- a/test/tap/file-sighup-test.js +++ b/test/tap/file-sighup-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); test('file appender SIGHUP', (t) => { let closeCalled = 0; diff --git a/test/tap/fileAppender-test.js b/test/tap/fileAppender-test.js index 39b0394..1cc9f94 100644 --- a/test/tap/fileAppender-test.js +++ b/test/tap/fileAppender-test.js @@ -3,7 +3,7 @@ const test = require('tap').test; const fs = require('fs'); const path = require('path'); -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); const log4js = require('../../lib/log4js'); const zlib = require('zlib'); const EOL = require('os').EOL || '\n'; diff --git a/test/tap/gelfAppender-test.js b/test/tap/gelfAppender-test.js index b473ee4..7ea72c2 100644 --- a/test/tap/gelfAppender-test.js +++ b/test/tap/gelfAppender-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); const realLayouts = require('../../lib/layouts'); const setupLogging = function (options, category, compressedLength) { diff --git a/test/tap/hipchatAppender-test.js b/test/tap/hipchatAppender-test.js index 2d58687..09f92fd 100644 --- a/test/tap/hipchatAppender-test.js +++ b/test/tap/hipchatAppender-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); function setupLogging(category, options) { const lastRequest = {}; diff --git a/test/tap/logFaces-HTTP-test.js b/test/tap/logFaces-HTTP-test.js index 05df419..56e73ad 100644 --- a/test/tap/logFaces-HTTP-test.js +++ b/test/tap/logFaces-HTTP-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); function setupLogging(category, options) { const fakeAxios = { diff --git a/test/tap/logFaces-UDP-test.js b/test/tap/logFaces-UDP-test.js index a5f0fa8..82743d3 100644 --- a/test/tap/logFaces-UDP-test.js +++ b/test/tap/logFaces-UDP-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; 
-const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); function setupLogging(category, options) { const fakeDgram = { diff --git a/test/tap/logging-test.js b/test/tap/logging-test.js index 554f9d1..30c8215 100644 --- a/test/tap/logging-test.js +++ b/test/tap/logging-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); const recording = require('../../lib/appenders/recording'); test('log4js', (batch) => { diff --git a/test/tap/logglyAppender-test.js b/test/tap/logglyAppender-test.js index 400a4b8..f0606d4 100644 --- a/test/tap/logglyAppender-test.js +++ b/test/tap/logglyAppender-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); const layouts = require('../../lib/layouts'); function setupLogging(category, options) { diff --git a/test/tap/logstashHTTP-test.js b/test/tap/logstashHTTP-test.js index 7230703..eeb94be 100644 --- a/test/tap/logstashHTTP-test.js +++ b/test/tap/logstashHTTP-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); function setupLogging(category, options) { const fakeAxios = { diff --git a/test/tap/logstashUDP-test.js b/test/tap/logstashUDP-test.js index 394b865..ccd9b65 100644 --- a/test/tap/logstashUDP-test.js +++ b/test/tap/logstashUDP-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); function setupLogging(category, options) { const udpSent = {}; diff --git a/test/tap/mailgunAppender-test.js b/test/tap/mailgunAppender-test.js index 248bd4a..1d0eaac 100644 --- a/test/tap/mailgunAppender-test.js +++ b/test/tap/mailgunAppender-test.js @@ -2,7 +2,7 @@ const test = require('tap').test; const layouts = require('../../lib/layouts'); -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); function setupLogging(category, options) { const msgs = []; diff --git a/test/tap/multiprocess-shutdown-test.js b/test/tap/multiprocess-shutdown-test.js index f3a537e..52e3cc8 100644 --- a/test/tap/multiprocess-shutdown-test.js +++ b/test/tap/multiprocess-shutdown-test.js @@ -3,7 +3,7 @@ const test = require('tap').test; const log4js = require('../../lib/log4js'); const net = require('net'); -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); test('multiprocess appender shutdown (master)', { timeout: 2000 }, (t) => { log4js.configure({ diff --git a/test/tap/multiprocess-test.js b/test/tap/multiprocess-test.js index 7a85e18..b9d3f5b 100644 --- a/test/tap/multiprocess-test.js +++ b/test/tap/multiprocess-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); const recording = require('../../lib/appenders/recording'); function makeFakeNet() { diff --git a/test/tap/rabbitmqAppender-test.js b/test/tap/rabbitmqAppender-test.js index 85b97b4..1e6d9d1 100644 --- a/test/tap/rabbitmqAppender-test.js +++ b/test/tap/rabbitmqAppender-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = 
require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); function setupLogging(category, options) { const fakeRabbitmq = { diff --git a/test/tap/redisAppender-test.js b/test/tap/redisAppender-test.js index 2c42af3..fb35a48 100644 --- a/test/tap/redisAppender-test.js +++ b/test/tap/redisAppender-test.js @@ -2,7 +2,7 @@ const test = require('tap').test; // const log4js = require('../../lib/log4js'); -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); function setupLogging(category, options) { const fakeRedis = { diff --git a/test/tap/slackAppender-test.js b/test/tap/slackAppender-test.js index 75048d6..d2a643a 100644 --- a/test/tap/slackAppender-test.js +++ b/test/tap/slackAppender-test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('tap').test; -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); const realLayouts = require('../../lib/layouts'); function setupLogging(category, options) { diff --git a/test/tap/smtpAppender-test.js b/test/tap/smtpAppender-test.js index 497d076..ccccc94 100644 --- a/test/tap/smtpAppender-test.js +++ b/test/tap/smtpAppender-test.js @@ -2,7 +2,7 @@ const test = require('tap').test; const realLayouts = require('../../lib/layouts'); -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); function setupLogging(category, options, errorOnSend) { const msgs = []; diff --git a/test/tap/stderrAppender-test.js b/test/tap/stderrAppender-test.js index ee61e9a..f52856f 100644 --- a/test/tap/stderrAppender-test.js +++ b/test/tap/stderrAppender-test.js @@ -2,7 +2,7 @@ const test = require('tap').test; const layouts = require('../../lib/layouts'); -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); test('stderr appender', (t) => { const output = []; diff --git a/test/tap/stdoutAppender-test.js b/test/tap/stdoutAppender-test.js index a3b0ce4..c82b57e 100644 --- a/test/tap/stdoutAppender-test.js +++ b/test/tap/stdoutAppender-test.js @@ -2,7 +2,7 @@ const test = require('tap').test; const layouts = require('../../lib/layouts'); -const sandbox = require('sandboxed-module'); +const sandbox = require('@log4js-node/sandboxed-module'); test('stdout appender', (t) => { const output = []; From 11b21ce77360ae7e730d0bb173f186d04f2760ec Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 1 Feb 2018 07:58:46 +1100 Subject: [PATCH 07/34] fix(webpack): added loading of core appenders --- lib/appenders/index.js | 23 +++++++++++++---------- lib/categories.js | 3 --- lib/log4js.js | 30 +++++++++++++++++------------- 3 files changed, 30 insertions(+), 26 deletions(-) diff --git a/lib/appenders/index.js b/lib/appenders/index.js index c5448e0..930d574 100644 --- a/lib/appenders/index.js +++ b/lib/appenders/index.js @@ -5,6 +5,14 @@ const clustering = require('../clustering'); const levels = require('../levels'); const layouts = require('../layouts'); +// pre-load the core appenders so that webpack can find them +const coreAppenders = new Map(); +coreAppenders.set('console', require('./console')); +coreAppenders.set('stdout', require('./stdout')); +coreAppenders.set('stderr', require('./stderr')); +coreAppenders.set('file', require('./file')); +coreAppenders.set('dateFile', require('./dateFile')); + const appenders = new Map(); const tryLoading = (modulePath, config) => { @@ -22,10 +30,11 @@ const tryLoading = (modulePath, config) => { } }; -const 
loadAppenderModule = (type, config) => tryLoading(`./${type}`, config) || - tryLoading(type, config) || - tryLoading(path.join(path.dirname(require.main.filename), type), config) || - tryLoading(path.join(process.cwd(), type), config); +const loadAppenderModule = (type, config) => coreAppenders.get(type) || + tryLoading(`./${type}`, config) || + tryLoading(type, config) || + tryLoading(path.join(path.dirname(require.main.filename), type), config) || + tryLoading(path.join(process.cwd(), type), config); const createAppender = (name, config) => { const appenderConfig = config.appenders[name]; @@ -64,12 +73,6 @@ const setup = (config) => { }); }; -// setup({ -// appenders: { -// stdout: { type: 'stdout' } -// } -// }); - configuration.addListener((config) => { configuration.throwExceptionIf( config, diff --git a/lib/categories.js b/lib/categories.js index b107c66..5069669 100644 --- a/lib/categories.js +++ b/lib/categories.js @@ -83,9 +83,6 @@ const setup = (config) => { }); }; -// setup({ -// categories: { default: { appenders: ['stdout'], level: 'OFF' } } -// }); configuration.addListener(setup); const configForCategory = (category) => { diff --git a/lib/log4js.js b/lib/log4js.js index d015211..1ef0196 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -42,19 +42,6 @@ function sendLogEventToAppender(logEvent) { }); } -/** - * Get a logger instance. - * @static - * @param loggerCategoryName - * @return {Logger} instance of logger for the category - */ -function getLogger(category) { - if (!enabled) { - configure(process.env.LOG4JS_CONFIG || defaultConfig); - } - return new Logger(category || 'default'); -} - function loadConfigurationFile(filename) { if (filename) { debug(`Loading configuration from ${filename}`); @@ -118,6 +105,23 @@ function shutdown(cb) { return null; } +/** + * Get a logger instance. 
+ * @static + * @param loggerCategoryName + * @return {Logger} instance of logger for the category + */ +function getLogger(category) { + if (!enabled) { + configure(process.env.LOG4JS_CONFIG || { + appenders: { out: { type: 'stdout' } }, + categories: { default: { appenders: ['out'], level: 'OFF' } } + }); + } + return new Logger(category || 'default'); +} + + /** * @name log4js * @namespace Log4js From d52734471dc16438c8199915f2eb49545f52e0f5 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 5 Feb 2018 07:04:46 +1100 Subject: [PATCH 08/34] fix(test): all tests passing except server --- lib/appenders/index.js | 2 ++ lib/appenders/recording.js | 1 + lib/categories.js | 1 + lib/clustering.js | 6 ++--- test/tap/connect-logger-test.js | 24 +++++++++---------- test/tap/connect-nolog-test.js | 8 +++---- test/tap/gelfAppender-test.js | 3 ++- test/tap/logFaces-HTTP-test.js | 1 + test/tap/logFaces-UDP-test.js | 1 + test/tap/logger-test.js | 38 ++++++++++++++++++++----------- test/tap/logglyAppender-test.js | 1 + test/tap/logstashHTTP-test.js | 1 + test/tap/rabbitmqAppender-test.js | 1 + test/tap/redisAppender-test.js | 2 +- test/tap/smtpAppender-test.js | 1 + 15 files changed, 56 insertions(+), 35 deletions(-) diff --git a/lib/appenders/index.js b/lib/appenders/index.js index 930d574..1a173c9 100644 --- a/lib/appenders/index.js +++ b/lib/appenders/index.js @@ -73,6 +73,8 @@ const setup = (config) => { }); }; +setup({ appenders: { out: { type: 'stdout' } } }); + configuration.addListener((config) => { configuration.throwExceptionIf( config, diff --git a/lib/appenders/recording.js b/lib/appenders/recording.js index f0fa2f2..13576c0 100644 --- a/lib/appenders/recording.js +++ b/lib/appenders/recording.js @@ -7,6 +7,7 @@ const recordedEvents = []; function configure() { return function (logEvent) { debug(`received logEvent, number of events now ${recordedEvents.length + 1}`); + debug('log event was ', logEvent); recordedEvents.push(logEvent); }; } diff --git a/lib/categories.js b/lib/categories.js index 5069669..37fcb8b 100644 --- a/lib/categories.js +++ b/lib/categories.js @@ -83,6 +83,7 @@ const setup = (config) => { }); }; +setup({ categories: { default: { appenders: ['out'], level: 'OFF' } } }); configuration.addListener(setup); const configForCategory = (category) => { diff --git a/lib/clustering.js b/lib/clustering.js index 814e320..7b90374 100644 --- a/lib/clustering.js +++ b/lib/clustering.js @@ -78,15 +78,13 @@ module.exports = { isWorker: isWorker, send: (msg) => { if (isWorker()) { - if (pm2) { - process.send({ type: 'log4js:message', data: msg.serialise() }); - } else { + if (!pm2) { msg.cluster = { workerId: cluster.worker.id, worker: process.pid }; - cluster.send({ type: 'log4js:message', data: msg.serialise() }); } + process.send({ topic: 'log4js:message', data: msg.serialise() }); } else { sendToListeners(msg); } diff --git a/test/tap/connect-logger-test.js b/test/tap/connect-logger-test.js index 5c61b99..ead71de 100644 --- a/test/tap/connect-logger-test.js +++ b/test/tap/connect-logger-test.js @@ -60,11 +60,11 @@ function request(cl, method, url, code, reqHeaders, resHeaders) { test('log4js connect logger', (batch) => { const clm = require('../../lib/connect-logger'); batch.test('getConnectLoggerModule', (t) => { - t.type(clm, 'object', 'should return a connect logger factory'); + t.type(clm, 'function', 'should return a connect logger factory'); t.test('should take a log4js logger and return a "connect logger"', (assert) => { const ml = new MockLogger(); - const cl = 
clm.connectLogger(ml); + const cl = clm(ml); assert.type(cl, 'function'); assert.end(); @@ -72,7 +72,7 @@ test('log4js connect logger', (batch) => { t.test('log events', (assert) => { const ml = new MockLogger(); - const cl = clm.connectLogger(ml); + const cl = clm(ml); request(cl, 'GET', 'http://url', 200); const messages = ml.messages; @@ -89,7 +89,7 @@ test('log4js connect logger', (batch) => { t.test('log events with level below logging level', (assert) => { const ml = new MockLogger(); ml.level = levels.FATAL; - const cl = clm.connectLogger(ml); + const cl = clm(ml); request(cl, 'GET', 'http://url', 200); assert.type(ml.messages, 'Array'); @@ -100,7 +100,7 @@ test('log4js connect logger', (batch) => { t.test('log events with non-default level and custom format', (assert) => { const ml = new MockLogger(); ml.level = levels.INFO; - const cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url' }); + const cl = clm(ml, { level: levels.INFO, format: ':method :url' }); request(cl, 'GET', 'http://url', 200); const messages = ml.messages; @@ -116,7 +116,7 @@ test('log4js connect logger', (batch) => { batch.test('logger with options as string', (t) => { const ml = new MockLogger(); ml.level = levels.INFO; - const cl = clm.connectLogger(ml, ':method :url'); + const cl = clm(ml, ':method :url'); request(cl, 'POST', 'http://meh', 200); const messages = ml.messages; @@ -127,7 +127,7 @@ test('log4js connect logger', (batch) => { batch.test('auto log levels', (t) => { const ml = new MockLogger(); ml.level = levels.INFO; - const cl = clm.connectLogger(ml, { level: 'auto', format: ':method :url' }); + const cl = clm(ml, { level: 'auto', format: ':method :url' }); request(cl, 'GET', 'http://meh', 200); request(cl, 'GET', 'http://meh', 201); request(cl, 'GET', 'http://meh', 302); @@ -161,7 +161,7 @@ test('log4js connect logger', (batch) => { batch.test('format using a function', (t) => { const ml = new MockLogger(); ml.level = levels.INFO; - const cl = clm.connectLogger(ml, () => 'I was called'); + const cl = clm(ml, () => 'I was called'); request(cl, 'GET', 'http://blah', 200); t.equal(ml.messages[0].message, 'I was called'); @@ -171,7 +171,7 @@ test('log4js connect logger', (batch) => { batch.test('format that includes request headers', (t) => { const ml = new MockLogger(); ml.level = levels.INFO; - const cl = clm.connectLogger(ml, ':req[Content-Type]'); + const cl = clm(ml, ':req[Content-Type]'); request( cl, 'GET', 'http://blah', 200, @@ -185,7 +185,7 @@ test('log4js connect logger', (batch) => { batch.test('format that includes response headers', (t) => { const ml = new MockLogger(); ml.level = levels.INFO; - const cl = clm.connectLogger(ml, ':res[Content-Type]'); + const cl = clm(ml, ':res[Content-Type]'); request( cl, 'GET', 'http://blah', 200, @@ -200,7 +200,7 @@ test('log4js connect logger', (batch) => { batch.test('log events with custom token', (t) => { const ml = new MockLogger(); ml.level = levels.INFO; - const cl = clm.connectLogger(ml, { + const cl = clm(ml, { level: levels.INFO, format: ':method :url :custom_string', tokens: [ @@ -221,7 +221,7 @@ test('log4js connect logger', (batch) => { batch.test('log events with custom override token', (t) => { const ml = new MockLogger(); ml.level = levels.INFO; - const cl = clm.connectLogger(ml, { + const cl = clm(ml, { level: levels.INFO, format: ':method :url :date', tokens: [ diff --git a/test/tap/connect-nolog-test.js b/test/tap/connect-nolog-test.js index 8d3370d..1c81208 100644 --- a/test/tap/connect-nolog-test.js +++ 
b/test/tap/connect-nolog-test.js @@ -45,7 +45,7 @@ test('log4js connect logger', (batch) => { batch.test('with nolog config', (t) => { const ml = new MockLogger(); - const cl = clm.connectLogger(ml, { nolog: '\\.gif' }); + const cl = clm(ml, { nolog: '\\.gif' }); t.beforeEach((done) => { ml.messages = []; done(); }); @@ -82,7 +82,7 @@ test('log4js connect logger', (batch) => { batch.test('nolog Strings', (t) => { const ml = new MockLogger(); - const cl = clm.connectLogger(ml, { nolog: '\\.gif|\\.jpe?g' }); + const cl = clm(ml, { nolog: '\\.gif|\\.jpe?g' }); t.beforeEach((done) => { ml.messages = []; done(); }); @@ -129,7 +129,7 @@ test('log4js connect logger', (batch) => { batch.test('nolog Array', (t) => { const ml = new MockLogger(); - const cl = clm.connectLogger(ml, { nolog: ['\\.gif', '\\.jpe?g'] }); + const cl = clm(ml, { nolog: ['\\.gif', '\\.jpe?g'] }); t.beforeEach((done) => { ml.messages = []; done(); }); @@ -176,7 +176,7 @@ test('log4js connect logger', (batch) => { batch.test('nolog RegExp', (t) => { const ml = new MockLogger(); - const cl = clm.connectLogger(ml, { nolog: /\.gif|\.jpe?g/ }); + const cl = clm(ml, { nolog: /\.gif|\.jpe?g/ }); t.beforeEach((done) => { ml.messages = []; done(); }); diff --git a/test/tap/gelfAppender-test.js b/test/tap/gelfAppender-test.js index 7ea72c2..562ec70 100644 --- a/test/tap/gelfAppender-test.js +++ b/test/tap/gelfAppender-test.js @@ -48,6 +48,7 @@ const setupLogging = function (options, category, compressedLength) { let exitHandler; const fakeConsole = { + log: () => {}, error: function (message) { this.message = message; } @@ -63,7 +64,6 @@ const setupLogging = function (options, category, compressedLength) { }; const log4js = sandbox.require('../../lib/log4js', { - // singleOnly: true, requires: { dgram: fakeDgram, zlib: fakeZlib, @@ -71,6 +71,7 @@ const setupLogging = function (options, category, compressedLength) { }, globals: { process: { + version: process.version, on: function (evt, handler) { if (evt === 'exit') { exitHandler = handler; diff --git a/test/tap/logFaces-HTTP-test.js b/test/tap/logFaces-HTTP-test.js index 56e73ad..7c285cf 100644 --- a/test/tap/logFaces-HTTP-test.js +++ b/test/tap/logFaces-HTTP-test.js @@ -21,6 +21,7 @@ function setupLogging(category, options) { }; const fakeConsole = { + log: () => {}, error: function (msg) { this.msg = msg; } diff --git a/test/tap/logFaces-UDP-test.js b/test/tap/logFaces-UDP-test.js index 82743d3..2b2fa85 100644 --- a/test/tap/logFaces-UDP-test.js +++ b/test/tap/logFaces-UDP-test.js @@ -21,6 +21,7 @@ function setupLogging(category, options) { }; const fakeConsole = { + log: () => {}, error: function (msg, err) { this.msg = msg; this.err = err; diff --git a/test/tap/logger-test.js b/test/tap/logger-test.js index 6dd35df..69dfa8f 100644 --- a/test/tap/logger-test.js +++ b/test/tap/logger-test.js @@ -1,23 +1,35 @@ 'use strict'; const test = require('tap').test; +const debug = require('debug')('log4js:test.logger'); const levels = require('../../lib/levels'); -const Logger = require('../../lib/logger'); +const sandbox = require('@log4js-node/sandboxed-module'); + +const events = []; +const Logger = sandbox.require( + '../../lib/logger', + { + requires: { + './levels': levels, + './clustering': { + isMaster: () => true, + onlyOnMaster: fn => fn(), + send: (evt) => { + debug('fake clustering got event:', evt); + events.push(evt); + } + } + } + } +); const testConfig = { level: levels.TRACE }; -const testDispatcher = { - events: [], - dispatch: function (evt) { - this.events.push(evt); - } 
-}; - test('../../lib/logger', (batch) => { batch.beforeEach((done) => { - testDispatcher.events = []; + events.length = 0; testConfig.level = levels.TRACE; done(); }); @@ -33,7 +45,7 @@ test('../../lib/logger', (batch) => { batch.test('constructor with category', (t) => { const logger = new Logger('cheese'); t.equal(logger.category, 'cheese', 'should use category'); - t.equal(logger.level, levels.TRACE, 'should use TRACE log level'); + t.equal(logger.level, levels.OFF, 'should use OFF log level'); t.end(); }); @@ -69,10 +81,10 @@ test('../../lib/logger', (batch) => { batch.test('should send log events to dispatch function', (t) => { const logger = new Logger('cheese'); + logger.level = 'debug'; logger.debug('Event 1'); logger.debug('Event 2'); logger.debug('Event 3'); - const events = testDispatcher.events; t.equal(events.length, 3); t.equal(events[0].data[0], 'Event 1'); @@ -83,6 +95,7 @@ test('../../lib/logger', (batch) => { batch.test('should add context values to every event', (t) => { const logger = new Logger('fromage'); + logger.level = 'debug'; logger.debug('Event 1'); logger.addContext('cheese', 'edam'); logger.debug('Event 2'); @@ -93,7 +106,6 @@ test('../../lib/logger', (batch) => { logger.debug('Event 5'); logger.clearContext(); logger.debug('Event 6'); - const events = testDispatcher.events; t.equal(events.length, 6); t.same(events[0].context, {}); @@ -107,9 +119,9 @@ test('../../lib/logger', (batch) => { batch.test('should not break when log data has no toString', (t) => { const logger = new Logger('thing'); + logger.level = 'debug'; logger.info('Just testing ', Object.create(null)); - const events = testDispatcher.events; t.equal(events.length, 1); t.end(); }); diff --git a/test/tap/logglyAppender-test.js b/test/tap/logglyAppender-test.js index f0606d4..ed27830 100644 --- a/test/tap/logglyAppender-test.js +++ b/test/tap/logglyAppender-test.js @@ -33,6 +33,7 @@ function setupLogging(category, options) { }; const fakeConsole = { + log: () => {}, errors: [], error: function (msg, value) { this.errors.push({ msg: msg, value: value }); diff --git a/test/tap/logstashHTTP-test.js b/test/tap/logstashHTTP-test.js index eeb94be..02a2b4a 100644 --- a/test/tap/logstashHTTP-test.js +++ b/test/tap/logstashHTTP-test.js @@ -21,6 +21,7 @@ function setupLogging(category, options) { }; const fakeConsole = { + log: () => {}, error: function (msg) { this.msg = msg; } diff --git a/test/tap/rabbitmqAppender-test.js b/test/tap/rabbitmqAppender-test.js index 1e6d9d1..11b2851 100644 --- a/test/tap/rabbitmqAppender-test.js +++ b/test/tap/rabbitmqAppender-test.js @@ -24,6 +24,7 @@ function setupLogging(category, options) { }; const fakeConsole = { + log: () => {}, errors: [], error: function (msg) { this.errors.push(msg); diff --git a/test/tap/redisAppender-test.js b/test/tap/redisAppender-test.js index fb35a48..25e05e8 100644 --- a/test/tap/redisAppender-test.js +++ b/test/tap/redisAppender-test.js @@ -1,7 +1,6 @@ 'use strict'; const test = require('tap').test; -// const log4js = require('../../lib/log4js'); const sandbox = require('@log4js-node/sandboxed-module'); function setupLogging(category, options) { @@ -25,6 +24,7 @@ function setupLogging(category, options) { }; const fakeConsole = { + log: () => {}, errors: [], error: function (msg) { this.errors.push(msg); diff --git a/test/tap/smtpAppender-test.js b/test/tap/smtpAppender-test.js index ccccc94..4aad5df 100644 --- a/test/tap/smtpAppender-test.js +++ b/test/tap/smtpAppender-test.js @@ -36,6 +36,7 @@ function setupLogging(category, options, 
errorOnSend) { }; const fakeConsole = { + log: () => {}, errors: [], error: function (msg, value) { this.errors.push({ msg: msg, value: value }); From ac853f090d0bc01a3e73f6889fc9e0b0ef26ad37 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 5 Feb 2018 08:54:32 +1100 Subject: [PATCH 09/34] fix(tcp): got server working --- lib/appenders/tcp-server.js | 39 ++++++++++++++++++++++++++++ lib/server.js | 24 ----------------- test/tap/server-test.js | 51 ++++++++++++++++++++----------------- 3 files changed, 66 insertions(+), 48 deletions(-) create mode 100644 lib/appenders/tcp-server.js delete mode 100644 lib/server.js diff --git a/lib/appenders/tcp-server.js b/lib/appenders/tcp-server.js new file mode 100644 index 0000000..6a9d1fb --- /dev/null +++ b/lib/appenders/tcp-server.js @@ -0,0 +1,39 @@ +const debug = require('debug')('log4js:tcp-server'); +const net = require('net'); +const clustering = require('../clustering'); +const LoggingEvent = require('../LoggingEvent'); + +const send = (data) => { + if (data) { + const event = LoggingEvent.deserialise(data); + clustering.send(event); + } +}; + +exports.configure = (config) => { + debug('configure called with ', config); + // dummy shutdown if we're not master + let shutdown = (cb) => { cb(); }; + + clustering.onlyOnMaster(() => { + const server = net.createServer((socket) => { + socket.setEncoding('utf8'); + socket.on('data', send); + socket.on('end', send); + }); + + server.listen(config.port || 5000, config.host || 'localhost', () => { + debug(`listening on ${config.host || 'localhost'}:${config.port || 5000}`); + server.unref(); + }); + + shutdown = (cb) => { + debug('shutdown called.'); + server.close(cb); + }; + }); + + return { + shutdown + }; +}; diff --git a/lib/server.js b/lib/server.js deleted file mode 100644 index 32cd555..0000000 --- a/lib/server.js +++ /dev/null @@ -1,24 +0,0 @@ -const net = require('net'); - -module.exports = (config, clustering) => { - // dummy shutdown if we're not master - let shutdown = (cb) => { cb(); }; - - clustering.onlyOnMaster(() => { - const server = net.createServer((socket) => { - socket.setEncoding('utf8'); - socket.on('data', clustering.send); - socket.on('end', clustering.send); - }); - - server.listen(config.port || 5000, config.host || 'localhost', () => { - server.unref(); - }); - - shutdown = (cb) => { - server.close(cb); - }; - }); - - return shutdown; -}; diff --git a/test/tap/server-test.js b/test/tap/server-test.js index fb7bc5a..3be4f98 100644 --- a/test/tap/server-test.js +++ b/test/tap/server-test.js @@ -7,38 +7,41 @@ const LoggingEvent = require('../../lib/LoggingEvent'); log4js.configure({ appenders: { - vcr: { type: 'recording' } + vcr: { type: 'recording' }, + tcp: { type: 'tcp-server', port: 5678 } }, categories: { default: { appenders: ['vcr'], level: 'debug' } - }, - listen: { - port: 5678 } }); test('TCP Server', (batch) => { batch.test('should listen for TCP messages and re-send via process.send', (t) => { - const socket = net.connect(5678, () => { - socket.write( - (new LoggingEvent('test-category', levels.INFO, ['something'], {})).serialise(), - () => { - socket.end(); - log4js.shutdown(() => { - const logs = vcr.replay(); - t.equal(logs.length, 1); - t.match(logs[0], { - data: ['something'], - categoryName: 'test-category', - level: { levelStr: 'INFO' }, - context: {} - }); - t.end(); - }); - } - ); - }); - socket.unref(); + // give the socket a chance to start up + setTimeout(() => { + const socket = net.connect(5678, () => { + socket.write( + (new 
LoggingEvent('test-category', levels.INFO, ['something'], {})).serialise(), + () => { + socket.end(); + setTimeout(() => { + log4js.shutdown(() => { + const logs = vcr.replay(); + t.equal(logs.length, 1); + t.match(logs[0], { + data: ['something'], + categoryName: 'test-category', + level: { levelStr: 'INFO' }, + context: {} + }); + t.end(); + }); + }, 100); + } + ); + }); + socket.unref(); + }, 100); }); batch.end(); }); From 09a72eb17ae297cd4e6de2cbd5d7929ee0c73660 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Wed, 7 Feb 2018 08:25:02 +1100 Subject: [PATCH 10/34] fix(test): added coverage of serialisation --- lib/LoggingEvent.js | 20 +++++++++++------ test/tap/LoggingEvent-test.js | 42 +++++++++++++++++++++++++++++++++++ 2 files changed, 55 insertions(+), 7 deletions(-) create mode 100644 test/tap/LoggingEvent-test.js diff --git a/lib/LoggingEvent.js b/lib/LoggingEvent.js index c1389b2..616bb02 100644 --- a/lib/LoggingEvent.js +++ b/lib/LoggingEvent.js @@ -30,11 +30,10 @@ class LoggingEvent { } serialise() { - // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. - // The following allows us to serialize errors correctly. - // Validate that we really are in this case try { const logData = this.data.map((e) => { + // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. + // The following allows us to serialize errors correctly. if (e && e.stack && CircularJSON.stringify(e) === '{}') { e = { message: e.message, stack: e.stack }; } @@ -54,10 +53,8 @@ class LoggingEvent { static deserialise(serialised) { let event; try { - event = CircularJSON.parse(serialised); - event.startTime = new Date(event.startTime); - event.level = levels.getLevel(event.level.levelStr); - event.data = event.data.map((e) => { + const rehydratedEvent = CircularJSON.parse(serialised); + rehydratedEvent.data = rehydratedEvent.data.map((e) => { if (e && e.stack) { const fakeError = new Error(e.message); fakeError.stack = e.stack; @@ -65,6 +62,15 @@ class LoggingEvent { } return e; }); + event = new LoggingEvent( + rehydratedEvent.categoryName, + levels.getLevel(rehydratedEvent.level.levelStr), + rehydratedEvent.data, + rehydratedEvent.context + ); + event.startTime = new Date(rehydratedEvent.startTime); + event.pid = rehydratedEvent.pid; + event.cluster = rehydratedEvent.cluster; } catch (e) { event = new LoggingEvent( 'log4js', diff --git a/test/tap/LoggingEvent-test.js b/test/tap/LoggingEvent-test.js new file mode 100644 index 0000000..a30c186 --- /dev/null +++ b/test/tap/LoggingEvent-test.js @@ -0,0 +1,42 @@ +const test = require('tap').test; +const LoggingEvent = require('../../lib/LoggingEvent'); +const levels = require('../../lib/levels'); + +test('LoggingEvent', (batch) => { + batch.test('should serialise to JSON', (t) => { + const event = new LoggingEvent('cheese', levels.DEBUG, ['log message'], { user: 'bob' }); + // set the event date to a known value + event.startTime = new Date(Date.UTC(2018, 1, 4, 18, 30, 23, 10)); + const rehydratedEvent = JSON.parse(event.serialise()); + t.equal(rehydratedEvent.startTime, '2018-02-04T18:30:23.010Z'); + t.equal(rehydratedEvent.categoryName, 'cheese'); + t.equal(rehydratedEvent.level.levelStr, 'DEBUG'); + t.equal(rehydratedEvent.data.length, 1); + t.equal(rehydratedEvent.data[0], 'log message'); + t.equal(rehydratedEvent.context.user, 'bob'); + t.end(); + }); + + batch.test('should deserialise from JSON', (t) => { + const dehydratedEvent = `{ + "startTime": "2018-02-04T10:25:23.010Z", + "categoryName": "biscuits", 
+ "level": { + "levelStr": "INFO" + }, + "data": [ "some log message", { "x": 1 } ], + "context": { "thing": "otherThing" } + }`; + const event = LoggingEvent.deserialise(dehydratedEvent); + t.type(event, LoggingEvent); + t.same(event.startTime, new Date(Date.UTC(2018, 1, 4, 10, 25, 23, 10))); + t.equal(event.categoryName, 'biscuits'); + t.same(event.level, levels.INFO); + t.equal(event.data[0], 'some log message'); + t.equal(event.data[1].x, 1); + t.equal(event.context.thing, 'otherThing'); + t.end(); + }); + + batch.end(); +}); From 3203efcb55a31e9629b6eba5bc8647b70ca85588 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 8 Feb 2018 08:12:17 +1100 Subject: [PATCH 11/34] fix(json): removed try catch because circ json fixes typeerror --- lib/LoggingEvent.js | 34 ++++++++++------------------------ 1 file changed, 10 insertions(+), 24 deletions(-) diff --git a/lib/LoggingEvent.js b/lib/LoggingEvent.js index 616bb02..257f193 100644 --- a/lib/LoggingEvent.js +++ b/lib/LoggingEvent.js @@ -21,33 +21,19 @@ class LoggingEvent { this.level = level; this.context = Object.assign({}, context); this.pid = process.pid; - // if (cluster && cluster.isWorker) { - // this.cluster = { - // workerId: cluster.worker.id, - // worker: process.pid - // }; - // } } serialise() { - try { - const logData = this.data.map((e) => { - // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. - // The following allows us to serialize errors correctly. - if (e && e.stack && CircularJSON.stringify(e) === '{}') { - e = { message: e.message, stack: e.stack }; - } - return e; - }); - this.data = logData; - return CircularJSON.stringify(this); - } catch (e) { - return new LoggingEvent( - 'log4js', - levels.ERROR, - ['Unable to serialise log event due to :', e] - ).serialise(); - } + const logData = this.data.map((e) => { + // JSON.stringify(new Error('test')) returns {}, which is not really useful for us. + // The following allows us to serialize errors correctly. + if (e && e.stack && CircularJSON.stringify(e) === '{}') { + e = { message: e.message, stack: e.stack }; + } + return e; + }); + this.data = logData; + return CircularJSON.stringify(this); } static deserialise(serialised) { From c98a405c6062b0c7839e711edb102c38e3f08b28 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 8 Feb 2018 08:12:50 +1100 Subject: [PATCH 12/34] chore(test): added test for passenger clustering --- lib/clustering.js | 9 +++---- test/tap/passenger-test.js | 51 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+), 6 deletions(-) create mode 100644 test/tap/passenger-test.js diff --git a/lib/clustering.js b/lib/clustering.js index 7b90374..e8ede70 100644 --- a/lib/clustering.js +++ b/lib/clustering.js @@ -11,7 +11,6 @@ let pm2InstanceVar = 'NODE_APP_INSTANCE'; const isPM2Master = () => pm2 && process.env[pm2InstanceVar] === '0'; const isMaster = () => disabled || cluster.isMaster || isPM2Master(); -const isWorker = () => !isMaster(); const sendToListeners = (logEvent) => { listeners.forEach(l => l(logEvent)); @@ -73,11 +72,11 @@ configuration.addListener((config) => { module.exports = { onlyOnMaster: (fn, notMaster) => (isMaster() ? fn() : notMaster), - onlyOnWorker: (fn, notWorker) => (isWorker() ? 
fn() : notWorker), isMaster: isMaster, - isWorker: isWorker, send: (msg) => { - if (isWorker()) { + if (isMaster()) { + sendToListeners(msg); + } else { if (!pm2) { msg.cluster = { workerId: cluster.worker.id, @@ -85,8 +84,6 @@ module.exports = { }; } process.send({ topic: 'log4js:message', data: msg.serialise() }); - } else { - sendToListeners(msg); } }, onMessage: (listener) => { diff --git a/test/tap/passenger-test.js b/test/tap/passenger-test.js new file mode 100644 index 0000000..bf69837 --- /dev/null +++ b/test/tap/passenger-test.js @@ -0,0 +1,51 @@ +const test = require('tap').test; +const sandbox = require('@log4js-node/sandboxed-module'); + +// passenger provides a non-functional cluster module, +// but it does not implement the event emitter functions +// this is taken from https://github.com/phusion/passenger/blob/82bef697c0019c034faeb9b0f8c08a43ec4e1e22/src/helper-scripts/node-loader.js#L64 +const passengerCluster = { + disconnect: function () { return false; }, + fork: function () { return false; }, + setupMaster: function () { return false; }, + isWorker: true, + isMaster: false, + schedulingPolicy: false, + settings: false, + worker: false, + workers: false, +}; + +const vcr = require('../../lib/appenders/recording'); + +const log4js = sandbox.require( + '../../lib/log4js', + { + requires: { + cluster: passengerCluster, + './appenders/recording': vcr + } + } +); + +test('When running in Passenger', (batch) => { + batch.test('it should still log', (t) => { + log4js.configure({ + appenders: { + vcr: { type: 'recording' } + }, + categories: { + default: { appenders: ['vcr'], level: 'info' } + }, + disableClustering: true + }); + log4js.getLogger().info('This should still work'); + + const events = vcr.replay(); + t.equal(events.length, 1); + t.equal(events[0].data[0], 'This should still work'); + t.end(); + }); + + batch.end(); +}); From e3913ce5d7c8d80a34a1f30d263173f4a10f6cb4 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 8 Feb 2018 08:18:19 +1100 Subject: [PATCH 13/34] chore(coverage): removed unnecessary check --- lib/configuration.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/configuration.js b/lib/configuration.js index 0a9e88b..86cea04 100644 --- a/lib/configuration.js +++ b/lib/configuration.js @@ -14,10 +14,8 @@ const validIdentifier = thing => /^[A-Za-z][A-Za-z0-9_]*$/g.test(thing); const anInteger = thing => thing && typeof thing === 'number' && Number.isInteger(thing); const addListener = (fn) => { - if (fn) { - listeners.push(fn); - debug(`Added listener, listeners now ${listeners.length}`); - } + listeners.push(fn); + debug(`Added listener, listeners now ${listeners.length}`); }; const throwExceptionIf = (config, checks, message) => { From 7234e3ada3affc701181c8013f906fe2dfb66604 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 8 Feb 2018 08:30:42 +1100 Subject: [PATCH 14/34] chore(coverage): removed unused check --- lib/logger.js | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/lib/logger.js b/lib/logger.js index 4519e83..55bdedf 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -39,13 +39,10 @@ class Logger { categories.setLevelForCategory(this.category, levels.getLevel(level, this.level)); } - log() { - /* eslint prefer-rest-params:0 */ - // todo: once node v4 support dropped, use rest parameter instead - const args = Array.from(arguments); - const logLevel = levels.getLevel(args[0], levels.INFO); + log(level, ...args) { + const logLevel = levels.getLevel(level, levels.INFO); if 
(this.isLevelEnabled(logLevel)) { - this._log(logLevel, args.slice(1)); + this._log(logLevel, args); } } @@ -83,13 +80,8 @@ function addLevelMethods(target) { return this.isLevelEnabled(level); }; - Logger.prototype[levelMethod] = function () { - /* eslint prefer-rest-params:0 */ - // todo: once node v4 support dropped, use rest parameter instead - const args = Array.from(arguments); - if (this.isLevelEnabled(level)) { - this._log(level, args); - } + Logger.prototype[levelMethod] = function (...args) { + this.log(level, ...args); }; } From f244b7f03fd5210c22a552abde2ff69e38362c99 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 9 Feb 2018 08:22:56 +1100 Subject: [PATCH 15/34] docs(tcp): added docs for tcp appenders --- docs/appenders.md | 4 +++- docs/index.md | 5 ++++- docs/multiprocess.md | 5 ++++- docs/tcp-server.md | 23 +++++++++++++++++++++++ docs/tcp.md | 22 ++++++++++++++++++++++ 5 files changed, 56 insertions(+), 3 deletions(-) create mode 100644 docs/tcp-server.md create mode 100644 docs/tcp.md diff --git a/docs/appenders.md b/docs/appenders.md index a44fec9..09b1c4a 100644 --- a/docs/appenders.md +++ b/docs/appenders.md @@ -35,13 +35,15 @@ The following appenders are included with log4js. Some require extra dependencie * [mailgun](mailgun.md) * [multiFile](multiFile.md) * [multiprocess](multiprocess.md) +* [rabbitmq](rabbitmq.md) * [recording](recording.md) * [redis](redis.md) * [slack](slack.md) * [smtp](smtp.md) * [stderr](stderr.md) * [stdout](stdout.md) -* [rabbitmq](rabbitmq.md) +* [tcp](tcp.md) +* [tcp-server](tcp-server.md) ## Other Appenders diff --git a/docs/index.md b/docs/index.md index 7fcef3d..5c28ac5 100644 --- a/docs/index.md +++ b/docs/index.md @@ -16,7 +16,7 @@ There have been a few changes between log4js 1.x and 2.x (and 0.x too). You shou * [Loggly appender](loggly.md) * [Logstash UDP appender](logstashUDP.md) * logFaces ([UDP](logFaces-UDP.md) and [HTTP](logFaces-HTTP.md)) appender -* [multiprocess appender](multiprocess.md) (useful when you've got multiple servers but want to centralise logging) +* [TCP appender](tcp.md) (useful when you've got multiple servers but want to centralise logging) * a [logger for connect/express](connect-logger.md) servers * configurable log message [layout/patterns](layouts.md) * different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.) @@ -38,6 +38,9 @@ logger.level = 'debug'; // default level is OFF - which means no logs at all. logger.debug("Some debug messages"); ``` +## Clustering +If you use node's cluster, or passenger, or pm2, then you should read this [clustering guide](clustering.md) + ## Note for library makers If you're writing a library and would like to include support for log4js, without introducing a dependency headache for your users, take a look at [log4js-api](https://github.com/log4js-node/log4js-api). diff --git a/docs/multiprocess.md b/docs/multiprocess.md index 821361d..a3f31e8 100644 --- a/docs/multiprocess.md +++ b/docs/multiprocess.md @@ -1,8 +1,11 @@ # Multiprocess Appender +*You probably want to use the [tcp server](tcp-server.md) or [tcp appender](tcp.md) instead of this - they are more flexible* + +*Note that if you're just using node core's `cluster` module then you don't need to use this appender - log4js will handle logging within the cluster transparently.* + The multiprocess appender sends log events to a master server over TCP sockets. 
It can be used as a simple way to centralise logging when you have multiple servers or processes. It uses the node.js core networking modules, and so does not require any extra dependencies. Remember to call `log4js.shutdown` when your application terminates, so that the sockets get closed cleanly. -Note that if you're just using node core's `cluster` module then you don't need to use this appender - log4js will handle logging within the cluster transparently. ## Configuration diff --git a/docs/tcp-server.md b/docs/tcp-server.md new file mode 100644 index 0000000..62a90d9 --- /dev/null +++ b/docs/tcp-server.md @@ -0,0 +1,23 @@ +# TCP Server Appender + +Strictly speaking, this is not an appender - but it is configured as one. The TCP server listens for log messages on a port, taking JSON-encoded log events and then forwarding them to the other appenders. It can be used as a simple way to centralise logging when you have multiple servers or processes. It uses the node.js core networking modules, and so does not require any extra dependencies. Remember to call `log4js.shutdown` when your application terminates, so that the sockets get closed cleanly. It is designed to work with the [tcp appender](tcp.md), but could work with anything that sends correctly formatted JSON log events. + +## Configuration + +* `type` - `tcp-server` +* `port` - `integer` (optional, defaults to `5000`) - the port to listen on +* `host` - `string` (optional, defaults to `localhost`) - the host/IP address to listen on + +## Example (master) +```javascript +log4js.configure({ + appenders: { + file: { type: 'file', filename: 'all-the-logs.log' }, + server: { type: 'tcp-server', host: '0.0.0.0' } + }, + categories: { + default: { appenders: ['file'], level: 'info' } + } +}); +``` +This creates a log server listening on port 5000, on all IP addresses the host has assigned to it. Note that the appender is not included in the appenders listed for the categories. All events received on the socket will be forwarded to the other appenders, as if they had originated on the same server. diff --git a/docs/tcp.md b/docs/tcp.md new file mode 100644 index 0000000..95438b8 --- /dev/null +++ b/docs/tcp.md @@ -0,0 +1,22 @@ +# TCP Appender + +The TCP appender sends log events to a master server over TCP sockets. It can be used as a simple way to centralise logging when you have multiple servers or processes. It uses the node.js core networking modules, and so does not require any extra dependencies. Remember to call `log4js.shutdown` when your application terminates, so that the sockets get closed cleanly. It's designed to work with the [tcp-server](tcp-server.md), but it doesn't necessarily have to, just make sure whatever is listening at the other end is expecting JSON objects as strings. + +## Configuration + +* `type` - `tcp` +* `port` - `integer` (optional, defaults to `5000`) - the port to send to +* `host` - `string` (optional, defaults to `localhost`) - the host/IP address to send to + +## Example +```javascript +log4js.configure({ + appenders: { + network: { type: 'tcp', host: 'log.server' } + }, + categories: { + default: { appenders: ['network'], level: 'error' } + } +}); +``` +This will send all error messages to `log.server:5000`. 
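Taken together, the two appenders above give a simple centralised-logging setup: each application process ships its log events with the `tcp` appender, while a single aggregator process runs the `tcp-server` appender alongside whatever appenders should actually write the logs. A rough sketch, reusing the example configurations from these docs (the hostname `log.server` and the log file name are illustrative only; the port defaults to 5000 on both sides):

```javascript
const log4js = require('log4js');

// On each application server: forward error events to the central host.
log4js.configure({
  appenders: {
    network: { type: 'tcp', host: 'log.server' }
  },
  categories: {
    default: { appenders: ['network'], level: 'error' }
  }
});

// On the central host, in a separate process: listen on all interfaces and
// write everything that arrives to a file.
log4js.configure({
  appenders: {
    file: { type: 'file', filename: 'all-the-logs.log' },
    server: { type: 'tcp-server', host: '0.0.0.0' }
  },
  categories: {
    default: { appenders: ['file'], level: 'info' }
  }
});
```

Remember to call `log4js.shutdown` in both processes when they terminate, so that the sockets get closed cleanly.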
From 1e952ffd56631875aaf8757d20d93da6dda0833e Mon Sep 17 00:00:00 2001
From: Gareth Jones
Date: Mon, 12 Feb 2018 07:42:29 +1100
Subject: [PATCH 16/34] docs(clustering): added clustering info

---
 docs/clustering.md | 28 ++++++++++++++++++++++++++++
 docs/faq.md        | 18 ++----------------
 2 files changed, 30 insertions(+), 16 deletions(-)
 create mode 100644 docs/clustering.md

diff --git a/docs/clustering.md b/docs/clustering.md
new file mode 100644
index 0000000..31cb9e7
--- /dev/null
+++ b/docs/clustering.md
@@ -0,0 +1,28 @@
+# Clustering / Multi-process Logging
+
+If you're running log4js in an application that uses [node's core cluster](https://nodejs.org/dist/latest-v8.x/docs/api/cluster.html) then log4js will transparently handle making sure the processes don't try to log at the same time. All logging is done on the master process, with the worker processes sending their log messages to the master via `process.send`. This ensures that you don't get multiple processes trying to write to the same file (or rotate the log files) at the same time.
+
+This can cause problems in some rare circumstances, if you're experiencing weird logging problems, then use the `disableClustering: true` option in your log4js configuration to have every process behave as if it were the master process. Be careful if you're logging to files.
+
+## I'm using PM2, but I'm not getting any logs!
+To get log4js working with [PM2](http://pm2.keymetrics.io), you'll need to install the [pm2-intercom](https://www.npmjs.com/package/pm2-intercom) module.
+```bash
+pm2 install pm2-intercom
+```
+Then add the value `pm2: true` to your log4js configuration. If you're also using `node-config`, then you'll probably have renamed your `NODE_APP_INSTANCE` environment variable. If so, you'll also need to add `pm2InstanceVar: '<new name>'` where `<new name>` should be replaced with the new name you gave the instance environment variable.
+```javascript
+log4js.configure({
+  appenders: { out: { type: 'stdout'}},
+  categories: { default: { appenders: ['out'], level: 'info'}},
+  pm2: true,
+  pm2InstanceVar: 'INSTANCE_ID'
+});
+```
+
+## I'm using Passenger, but I'm not getting any logs!
+
+[Passenger](https://www.phusionpassenger.com/library/) replaces the node.js core cluster module with a non-functional stub, so you won't see any output using log4js. To fix this, add `disableClustering: true` to your configuration. Again, be careful if you're logging to files.
+
+## I'm not using clustering/pm2/passenger but I do have multiple processes that I'd like to all log to the same place
+
+Ok, you probably want to look at the [tcp-server](tcp-server.md) and [tcp appender](tcp.md) documentation.
diff --git a/docs/faq.md b/docs/faq.md
index 2e33348..a7348dc 100644
--- a/docs/faq.md
+++ b/docs/faq.md
@@ -35,23 +35,9 @@ const logger = log4js.getLogger('console');
 console.log = logger.info.bind(logger); // do the same for others - console.debug, etc.
 ```
-## I'm using PM2, but I'm not getting any logs!
-To get log4js working with PM2, you'll need to install the [pm2-intercom](https://www.npmjs.com/package/pm2-intercom) module.
-```bash
-pm2 install pm2-intercom
-```
-Then add the value `pm2: true` to your log4js configuration. If you're also using `node-config`, then you'll probably have renamed your `NODE_APP_INSTANCE` environment variable. If so, you'll also need to add `pm2InstanceVar: '<new name>'` where `<new name>` should be replaced with the new name you gave the instance environment variable.
-```javascript -log4js.configure({ - appenders: { out: { type: 'stdout'}}, - categories: { default: { appenders: ['out'], level: 'info'}}, - pm2: true, - pm2InstanceVar: 'INSTANCE_ID' -}); -``` +## I'm using pm2/passenger/some other third thing and I'm not getting any logs! -## FFS, why did you mess with the PM2 stuff? It was working fine for me! -You can turn off the clustering support, with the `disableClustering: true` option in your config. This will make log4js behave more like it did before version 2.x. Each worker process will log its own output, instead of sending it all to the master process. Be careful if you're logging to files though, this could result in weird behaviour. +Take a look at the [clustering](clustering.md) docs, they should help you out. ## NPM complains about nodemailer being deprecated, what should I do? From 83aca5b9d79c5dd9b236faf5fdc61c9fd73aa7f9 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 12 Feb 2018 07:52:21 +1100 Subject: [PATCH 17/34] fix(test): increase timeout on sighup test --- test/tap/file-sighup-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/tap/file-sighup-test.js b/test/tap/file-sighup-test.js index 7448ae6..de41b0b 100644 --- a/test/tap/file-sighup-test.js +++ b/test/tap/file-sighup-test.js @@ -45,5 +45,5 @@ test('file appender SIGHUP', (t) => { t.equal(openCalled, 1, 'open should be called once'); t.equal(closeCalled, 1, 'close should be called once'); t.end(); - }, 10); + }, 100); }); From 179587941ad96bf06460d137f1ee465200873a61 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 22 Feb 2018 08:09:13 +1100 Subject: [PATCH 18/34] chore: removed GELF appender --- docs/appenders.md | 9 +- docs/gelf.md | 53 ------- docs/index.md | 2 +- lib/appenders/gelf.js | 145 ------------------- test/tap/gelfAppender-test.js | 262 ---------------------------------- 5 files changed, 9 insertions(+), 462 deletions(-) delete mode 100644 docs/gelf.md delete mode 100644 lib/appenders/gelf.js delete mode 100644 test/tap/gelfAppender-test.js diff --git a/docs/appenders.md b/docs/appenders.md index 09b1c4a..1a1a82a 100644 --- a/docs/appenders.md +++ b/docs/appenders.md @@ -24,7 +24,6 @@ The following appenders are included with log4js. Some require extra dependencie * [dateFile](dateFile.md) * [file](file.md) * [fileSync](fileSync.md) -* [gelf](gelf.md) * [hipchat](hipchat.md) * [logFaces-HTTP](logFaces-HTTP.md) * [logFaces-UDP](logFaces-UDP.md) @@ -45,6 +44,14 @@ The following appenders are included with log4js. Some require extra dependencie * [tcp](tcp.md) * [tcp-server](tcp-server.md) +## Optional Appenders + +The following appenders are supported by log4js, but are no longer distributed with log4js core from version 3 onwards. + +* [gelf](https://github.com/log4js-node/gelf) + +For example, if you were previously using the gelf appender (`type: 'gelf'`) then you should add `@log4js-node/gelf` to your dependencies and change the type to `type: '@log4js-node/gelf'`. + ## Other Appenders Log4js can load appenders from outside the core appenders. The `type` config value is used as a require path if no matching appender can be found. 
For example, the following configuration will attempt to load an appender from the module 'cheese/appender', passing the rest of the config for the appender to that module: diff --git a/docs/gelf.md b/docs/gelf.md deleted file mode 100644 index e5c1a5b..0000000 --- a/docs/gelf.md +++ /dev/null @@ -1,53 +0,0 @@ -# GELF appender - -The GELF appender supports sending log messages over UDP to a [GELF](http://docs.graylog.org/en/2.2/pages/gelf.html) compatible server such as [Graylog](https://www.graylog.org). It uses node's core UDP support and does not require any other dependencies. If you use this appender, remember to call `log4js.shutdown` when your application terminates, so that all messages will have been sent to the server and the UDP socket can be closed. The appender supports passing custom fields to the server in both the config, and in individual log messages (see examples below). - -## Configuration - -* `type` - `gelf` -* `host` - `string` (defaults to `localhost`) - the gelf server hostname -* `port` - `integer` (defaults to `12201`) - the port the gelf server is listening on -* `hostname` - `string` (defaults to `OS.hostname()`) - the hostname used to identify the origin of the log messages. -* `facility` - `string` (optional) -* `customFields` - `object` (optional) - fields to be added to each log message; custom fields must start with an underscore. - -## Example (default config) -```javascript -log4js.configure({ - appenders: { - gelf: { type: 'gelf' } - }, - categories: { - default: { appenders: ['gelf'], level: 'info' } - } -}); -``` -This will send log messages to a server at `localhost:12201`. - -## Example (custom fields in config) -```javascript -log4js.configure({ - appenders: { - gelf: { type: 'gelf', host: 'gelf.server', customFields: { '_something': 'yep' } } - }, - categories: { - default: { appenders: ['gelf'], level: 'info' } - } -}); -``` -This will result in all log messages having the custom field `_something` set to 'yep'. - -# Example (custom fields in log message) -```javascript -log4js.configure({ - appenders: { - gelf: { type: 'gelf', customFields: { '_thing': 'isathing' } } - }, - categories: { - default: { appenders: ['gelf'], level: 'info' } - } -}); -const logger = log4js.getLogger(); -logger.error({ GELF: true, _thing2: 'alsoathing' }, 'oh no, something went wrong'); -``` -This will result in a log message with the custom fields `_thing` and `_thing2`. Note that log message custom fields will override config custom fields. diff --git a/docs/index.md b/docs/index.md index 5c28ac5..c691afe 100644 --- a/docs/index.md +++ b/docs/index.md @@ -12,7 +12,7 @@ There have been a few changes between log4js 1.x and 2.x (and 0.x too). 
You shou * coloured console logging to [stdout](stdout.md) or [stderr](stderr.md) * [file appender](file.md), with configurable log rolling based on file size or [date](dateFile.md) * [SMTP appender](smtp.md) -* [GELF appender](gelf.md) +* [GELF appender](https://github.com/log4js-node/gelf) * [Loggly appender](loggly.md) * [Logstash UDP appender](logstashUDP.md) * logFaces ([UDP](logFaces-UDP.md) and [HTTP](logFaces-HTTP.md)) appender diff --git a/lib/appenders/gelf.js b/lib/appenders/gelf.js deleted file mode 100644 index 05d8f51..0000000 --- a/lib/appenders/gelf.js +++ /dev/null @@ -1,145 +0,0 @@ -'use strict'; - -const zlib = require('zlib'); -// const levels = require('../levels'); -const dgram = require('dgram'); -const util = require('util'); -const OS = require('os'); -const debug = require('debug')('log4js:gelf'); - -/* eslint no-unused-vars:0 */ -const LOG_EMERG = 0; // system is unusable(unused) -const LOG_ALERT = 1; // action must be taken immediately(unused) -const LOG_CRIT = 2; // critical conditions -const LOG_ERROR = 3; // error conditions -const LOG_WARNING = 4; // warning conditions -const LOG_NOTICE = 5; // normal, but significant, condition(unused) -const LOG_INFO = 6; // informational message -const LOG_DEBUG = 7; // debug-level message - -/** - * GELF appender that supports sending UDP packets to a GELF compatible server such as Graylog - * - * @param layout a function that takes a logevent and returns a string (defaults to none). - * @param config.host - host to which to send logs (default:localhost) - * @param config.port - port at which to send logs to (default:12201) - * @param config.hostname - hostname of the current host (default:OS hostname) - * @param config.facility - facility to log to (default:nodejs-server) - */ -/* eslint no-underscore-dangle:0 */ -function gelfAppender(layout, config, levels) { - const levelMapping = {}; - levelMapping[levels.ALL] = LOG_DEBUG; - levelMapping[levels.TRACE] = LOG_DEBUG; - levelMapping[levels.DEBUG] = LOG_DEBUG; - levelMapping[levels.INFO] = LOG_INFO; - levelMapping[levels.WARN] = LOG_WARNING; - levelMapping[levels.ERROR] = LOG_ERROR; - levelMapping[levels.FATAL] = LOG_CRIT; - - const host = config.host || 'localhost'; - const port = config.port || 12201; - const hostname = config.hostname || OS.hostname(); - const facility = config.facility; - const customFields = config.customFields; - - const defaultCustomFields = customFields || {}; - - if (facility) { - defaultCustomFields._facility = facility; - } - - const client = dgram.createSocket('udp4'); - - process.on('exit', () => { - if (client) client.close(); - }); - - /** - * Add custom fields (start with underscore ) - * - if the first object passed to the logger contains 'GELF' field, - * copy the underscore fields to the message - * @param loggingEvent - * @param msg - */ - function addCustomFields(loggingEvent, msg) { - /* append defaultCustomFields firsts */ - Object.keys(defaultCustomFields).forEach((key) => { - // skip _id field for graylog2, skip keys not starts with UNDERSCORE - if (key.match(/^_/) && key !== '_id') { - msg[key] = defaultCustomFields[key]; - } - }); - - /* append custom fields per message */ - const data = loggingEvent.data; - if (!Array.isArray(data) || data.length === 0) return; - const firstData = data[0]; - if (firstData) { - if (!firstData.GELF) return; // identify with GELF field defined - // Remove the GELF key, some gelf supported logging systems drop the message with it - delete firstData.GELF; - Object.keys(firstData).forEach((key) => { 
- // skip _id field for graylog2, skip keys not starts with UNDERSCORE - if (key.match(/^_/) || key !== '_id') { - msg[key] = firstData[key]; - } - }); - - /* the custom field object should be removed, so it will not be looged by the later appenders */ - loggingEvent.data.shift(); - } - } - - function preparePacket(loggingEvent) { - const msg = {}; - addCustomFields(loggingEvent, msg); - msg.short_message = layout(loggingEvent); - - msg.version = '1.1'; - msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // log should use millisecond - msg.host = hostname; - msg.level = levelMapping[loggingEvent.level || levels.DEBUG]; - return msg; - } - - function sendPacket(packet) { - client.send(packet, 0, packet.length, port, host, (err) => { - if (err) { - console.error(err); - } - }); - } - - const app = (loggingEvent) => { - const message = preparePacket(loggingEvent); - zlib.gzip(Buffer.from(JSON.stringify(message)), (err, packet) => { - if (err) { - console.error(err.stack); - } else { - if (packet.length > 8192) { // eslint-disable-line - debug(`Message packet length (${packet.length}) is larger than 8k. Not sending`); - } else { - sendPacket(packet); - } - } - }); - }; - app.shutdown = function (cb) { - if (client) { - client.close(cb); - } - }; - - return app; -} - -function configure(config, layouts, findAppender, levels) { - let layout = layouts.messagePassThroughLayout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - return gelfAppender(layout, config, levels); -} - -module.exports.configure = configure; diff --git a/test/tap/gelfAppender-test.js b/test/tap/gelfAppender-test.js deleted file mode 100644 index 562ec70..0000000 --- a/test/tap/gelfAppender-test.js +++ /dev/null @@ -1,262 +0,0 @@ -'use strict'; - -const test = require('tap').test; -const sandbox = require('@log4js-node/sandboxed-module'); -const realLayouts = require('../../lib/layouts'); - -const setupLogging = function (options, category, compressedLength) { - const fakeDgram = { - sent: false, - socket: { - packetLength: 0, - closed: false, - close: function (cb) { - this.closed = true; - if (cb) cb(); - }, - send: function (pkt, offset, pktLength, port, host) { - fakeDgram.sent = true; - this.packet = pkt; - this.offset = offset; - this.packetLength = pktLength; - this.port = port; - this.host = host; - } - }, - createSocket: function (type) { - this.type = type; - return this.socket; - } - }; - - const fakeZlib = { - gzip: function (objectToCompress, callback) { - fakeZlib.uncompressed = objectToCompress; - if (this.shouldError) { - callback({ stack: 'oh noes' }); - return; - } - - if (compressedLength) { - callback(null, { length: compressedLength }); - } else { - callback(null, "I've been compressed"); - } - } - }; - - let exitHandler; - - const fakeConsole = { - log: () => {}, - error: function (message) { - this.message = message; - } - }; - - const fakeLayouts = { - layout: function (type, opt) { - this.type = type; - this.options = opt; - return realLayouts.messagePassThroughLayout; - }, - messagePassThroughLayout: realLayouts.messagePassThroughLayout - }; - - const log4js = sandbox.require('../../lib/log4js', { - requires: { - dgram: fakeDgram, - zlib: fakeZlib, - './layouts': fakeLayouts - }, - globals: { - process: { - version: process.version, - on: function (evt, handler) { - if (evt === 'exit') { - exitHandler = handler; - } - }, - removeListener: () => {}, - env: {}, - stderr: process.stderr - }, - console: fakeConsole - } - }); - - options = options || 
{}; - options.type = 'gelf'; - - log4js.configure({ - appenders: { gelf: options }, - categories: { default: { appenders: ['gelf'], level: 'debug' } } - }); - - return { - dgram: fakeDgram, - compress: fakeZlib, - exitHandler: exitHandler, - console: fakeConsole, - layouts: fakeLayouts, - logger: log4js.getLogger(category || 'gelf-test'), - log4js: log4js - }; -}; - -test('log4js gelfAppender', (batch) => { - batch.test('with default gelfAppender settings', (t) => { - const setup = setupLogging(); - setup.logger.info('This is a test'); - - const dgram = setup.dgram; - - t.test('dgram packet should be sent via udp to the localhost gelf server', (assert) => { - assert.equal(dgram.type, 'udp4'); - assert.equal(dgram.socket.host, 'localhost'); - assert.equal(dgram.socket.port, 12201); - assert.equal(dgram.socket.offset, 0); - assert.ok(dgram.socket.packetLength > 0, 'Received blank message'); - assert.equal(dgram.socket.packet, "I've been compressed"); - assert.end(); - }); - - const message = JSON.parse(setup.compress.uncompressed); - t.test('the uncompressed log message should be in the gelf format', (assert) => { - assert.equal(message.version, '1.1'); - assert.equal(message.host, require('os').hostname()); - assert.equal(message.level, 6); // INFO - assert.equal(message.short_message, 'This is a test'); - assert.end(); - }); - t.end(); - }); - - batch.test('with a message longer than 8k', (t) => { - const setup = setupLogging(undefined, undefined, 10240); - setup.logger.info('Blah.'); - - t.equal(setup.dgram.sent, false, 'the dgram packet should not be sent'); - t.end(); - }); - - batch.test('with a null log message', (t) => { - const setup = setupLogging(); - setup.logger.info(null); - - t.ok(setup.dgram.sent); - - const msg = JSON.parse(setup.compress.uncompressed); - t.equal(msg.level, 6); - t.equal(msg.short_message, 'null'); - t.end(); - }); - - batch.test('with non-default options', (t) => { - const setup = setupLogging({ - host: 'somewhere', - port: 12345, - hostname: 'cheese', - facility: 'nonsense' - }); - setup.logger.debug('Just testing.'); - - const dgram = setup.dgram; - t.test('the dgram packet should pick up the options', (assert) => { - assert.equal(dgram.socket.host, 'somewhere'); - assert.equal(dgram.socket.port, 12345); - assert.end(); - }); - - const message = JSON.parse(setup.compress.uncompressed); - t.test('the uncompressed packet should pick up the options', (assert) => { - assert.equal(message.host, 'cheese'); - assert.equal(message._facility, 'nonsense'); - assert.end(); - }); - - t.end(); - }); - - batch.test('on process.exit should close open sockets', (t) => { - const setup = setupLogging(); - setup.exitHandler(); - - t.ok(setup.dgram.socket.closed); - t.end(); - }); - - batch.test('on shutdown should close open sockets', (t) => { - const setup = setupLogging(); - setup.log4js.shutdown(() => { - t.ok(setup.dgram.socket.closed); - t.end(); - }); - }); - - batch.test('on zlib error should output to console.error', (t) => { - const setup = setupLogging(); - setup.compress.shouldError = true; - setup.logger.info('whatever'); - - t.equal(setup.console.message, 'oh noes'); - t.end(); - }); - - batch.test('with layout in configuration', (t) => { - const setup = setupLogging({ - layout: { - type: 'madeuplayout', - earlgrey: 'yes, please' - } - }); - - t.test('should pass options to layout', (assert) => { - assert.equal(setup.layouts.type, 'madeuplayout'); - assert.equal(setup.layouts.options.earlgrey, 'yes, please'); - assert.end(); - }); - t.end(); - }); - - 
batch.test('with custom fields options', (t) => { - const setup = setupLogging({ - host: 'somewhere', - port: 12345, - hostname: 'cheese', - facility: 'nonsense', - customFields: { - _every1: 'Hello every one', - _every2: 'Hello every two' - } - }); - const myFields = { - GELF: true, - _every2: 'Overwritten!', - _myField: 'This is my field!' - }; - setup.logger.debug(myFields, 'Just testing.'); - - const dgram = setup.dgram; - t.test('the dgram packet should pick up the options', (assert) => { - assert.equal(dgram.socket.host, 'somewhere'); - assert.equal(dgram.socket.port, 12345); - assert.end(); - }); - - const message = JSON.parse(setup.compress.uncompressed); - t.test('the uncompressed packet should pick up the options', (assert) => { - assert.equal(message.host, 'cheese'); - assert.notOk(message.GELF); // make sure flag was removed - assert.equal(message._facility, 'nonsense'); - assert.equal(message._every1, 'Hello every one'); // the default value - assert.equal(message._every2, 'Overwritten!'); // the overwritten value - assert.equal(message._myField, 'This is my field!'); // the value for this message only - assert.equal(message.short_message, 'Just testing.'); // skip the field object - assert.end(); - }); - t.end(); - }); - - batch.end(); -}); From 03a224da846664a1c50a55b35abb41004c429255 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 23 Feb 2018 08:07:02 +1100 Subject: [PATCH 19/34] fix(types): remove gelf from types --- types/log4js.d.ts | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/types/log4js.d.ts b/types/log4js.d.ts index e0ad4fb..0ab8916 100644 --- a/types/log4js.d.ts +++ b/types/log4js.d.ts @@ -167,19 +167,6 @@ export interface DateFileAppender { daysToKeep?: number; } -export interface GELFAppender { - 'type': 'gelf'; - // (defaults to localhost) - the gelf server hostname - host?: string; - // (defaults to 12201) - the port the gelf server is listening on - port?: number; - // (defaults to OS.hostname()) - the hostname used to identify the origin of the log messages. - hostname?: string; - facility?: string; - // fields to be added to each log message; custom fields must start with an underscore. - customFields?: { [field: string]: any }; -} - export interface HipchatAppender { type: 'hipchat'; // User token with notification privileges @@ -389,7 +376,6 @@ export type Appender = CategoryFilterAppender | FileAppender | SyncfileAppender | DateFileAppender - | GELFAppender | HipchatAppender | LogFacesHTTPAppender | LogFacesUDPAppender From 1fd96d0ab366e99f7de3b408906603b1d560d6b7 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 26 Feb 2018 08:31:23 +1100 Subject: [PATCH 20/34] chore: removed loggly appender --- docs/appenders.md | 10 --- lib/appenders/loggly.js | 127 -------------------------------- package.json | 1 - test/tap/logglyAppender-test.js | 118 ----------------------------- 4 files changed, 256 deletions(-) delete mode 100644 lib/appenders/loggly.js delete mode 100644 test/tap/logglyAppender-test.js diff --git a/docs/appenders.md b/docs/appenders.md index 6811e48..737e228 100644 --- a/docs/appenders.md +++ b/docs/appenders.md @@ -47,21 +47,11 @@ The following appenders are included with log4js. Some require extra dependencie The following appenders are supported by log4js, but are no longer distributed with log4js core from version 3 onwards. 
-* [gelf](https://github.com/log4js-node/gelf) - -For example, if you were previously using the gelf appender (`type: 'gelf'`) then you should add `@log4js-node/gelf` to your dependencies and change the type to `type: '@log4js-node/gelf'`. - -## Optional Appenders - -The following appenders are supported by log4js, but will issue deprecation warnings from version 2.6 onwards - they will be removed from the log4js core in version 3. If you are using these appenders, you should alter your dependencies to include them explicitly. - * [gelf](https://github.com/log4js-node/gelf) * [loggly](https://github.com/log4js-node/loggly) For example, if you were previously using the gelf appender (`type: 'gelf'`) then you should add `@log4js-node/gelf` to your dependencies and change the type to `type: '@log4js-node/gelf'`. -To turn off the deprecation warnings, add `deprecationWarnings: false` to your log4js config. The core version of the appender will still work. But note that you will have to install the external appenders when version 3 is released as they will not be included at all. - ## Other Appenders Log4js can load appenders from outside the core appenders. The `type` config value is used as a require path if no matching appender can be found. For example, the following configuration will attempt to load an appender from the module 'cheese/appender', passing the rest of the config for the appender to that module: diff --git a/lib/appenders/loggly.js b/lib/appenders/loggly.js deleted file mode 100644 index 285888d..0000000 --- a/lib/appenders/loggly.js +++ /dev/null @@ -1,127 +0,0 @@ -/* eslint no-prototype-builtins:1,no-restricted-syntax:[1, "ForInStatement"] */ - -'use strict'; - -/** - * This appender has been deprecated. - * Updates and bug fixes should be made against https://github.com/log4js-node/loggly - */ -const debug = require('debug')('log4js:loggly'); -const loggly = require('loggly'); -const os = require('os'); - -function isAnyObject(value) { - return value !== null && (typeof value === 'object' || typeof value === 'function'); -} - -function numKeys(obj) { - return Object.keys(obj).length; -} - -/** - * @param msgListArgs - * @returns Object{ deTaggedMsg: [...], additionalTags: [...] } - */ -function processTags(msgListArgs) { - const msgList = (msgListArgs.length === 1 ? [msgListArgs[0]] : msgListArgs); - - return msgList.reduce((accumulate, element) => { - if (isAnyObject(element) && Array.isArray(element.tags) && numKeys(element) === 1) { - accumulate.additionalTags = accumulate.additionalTags.concat(element.tags); - } else { - accumulate.deTaggedData.push(element); - } - return accumulate; - }, { deTaggedData: [], additionalTags: [] }); -} - -/** - * Loggly Appender. Sends logging events to Loggly using node-loggly, optionally adding tags. - * - * This appender will scan the msg from the logging event, and pull out any argument of the - * shape `{ tags: [] }` so that it's possible to add tags in a normal logging call. - * - * For example: - * - * logger.info({ tags: ['my-tag-1', 'my-tag-2'] }, 'Some message', someObj, ...) - * - * And then this appender will remove the tags param and append it to the config.tags. - * - * @param config object with loggly configuration data - * { - * token: 'your-really-long-input-token', - * subdomain: 'your-subdomain', - * tags: ['loggly-tag1', 'loggly-tag2', .., 'loggly-tagn'] - * } - * @param layout a function that takes a logevent and returns a string (defaults to objectLayout). 
- */ -function logglyAppender(config, layout) { - const client = loggly.createClient(config); - let openRequests = 0; - let shutdownCB; - - debug('creating appender.'); - - function app(loggingEvent) { - const result = processTags(loggingEvent.data); - const deTaggedData = result.deTaggedData; - const additionalTags = result.additionalTags; - - // Replace the data property with the deTaggedData - loggingEvent.data = deTaggedData; - - const msg = layout(loggingEvent); - - openRequests += 1; - debug('sending log event to loggly'); - client.log( - { - msg: msg, - level: loggingEvent.level.levelStr, - category: loggingEvent.categoryName, - hostname: os.hostname().toString(), - }, - additionalTags, - (error) => { - if (error) { - console.error('log4js.logglyAppender - error occurred: ', error); - } - - debug('log event received by loggly.'); - - openRequests -= 1; - - if (shutdownCB && openRequests === 0) { - shutdownCB(); - - shutdownCB = undefined; - } - } - ); - } - - app.shutdown = function (cb) { - debug('shutdown called'); - if (openRequests === 0) { - cb(); - } else { - shutdownCB = cb; - } - }; - - // trigger a deprecation warning, with a pointer to the replacement lib - app.deprecated = '@log4js-node/loggly'; - - return app; -} - -function configure(config, layouts) { - let layout = layouts.messagePassThroughLayout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - debug('configuring new appender'); - return logglyAppender(config, layout); -} - -module.exports.configure = configure; diff --git a/package.json b/package.json index 4262ffb..097f6de 100644 --- a/package.json +++ b/package.json @@ -63,7 +63,6 @@ }, "optionalDependencies": { "hipchat-notifier": "^1.1.0", - "loggly": "^1.1.0", "mailgun-js": "^0.7.0", "nodemailer": "^2.5.0", "redis": "^2.7.1", diff --git a/test/tap/logglyAppender-test.js b/test/tap/logglyAppender-test.js deleted file mode 100644 index ed27830..0000000 --- a/test/tap/logglyAppender-test.js +++ /dev/null @@ -1,118 +0,0 @@ -'use strict'; - -const test = require('tap').test; -const sandbox = require('@log4js-node/sandboxed-module'); -const layouts = require('../../lib/layouts'); - -function setupLogging(category, options) { - const msgs = []; - - const fakeLoggly = { - createClient: function (opts) { - return { - config: opts, - log: function (msg, tags, cb) { - msgs.push({ - msg: msg, - tags: tags, - cb: cb - }); - } - }; - } - }; - - const fakeLayouts = { - layout: function (type, config) { - this.type = type; - this.config = config; - return layouts.messagePassThroughLayout; - }, - basicLayout: layouts.basicLayout, - messagePassThroughLayout: layouts.messagePassThroughLayout - }; - - const fakeConsole = { - log: () => {}, - errors: [], - error: function (msg, value) { - this.errors.push({ msg: msg, value: value }); - } - }; - - const log4js = sandbox.require('../../lib/log4js', { - requires: { - loggly: fakeLoggly, - './layouts': fakeLayouts - }, - globals: { - console: fakeConsole - } - }); - - options = options || {}; - options.type = 'loggly'; - - log4js.configure({ - appenders: { loggly: options }, - categories: { default: { appenders: ['loggly'], level: 'trace' } } - }); - - return { - log4js: log4js, - logger: log4js.getLogger(category), - loggly: fakeLoggly, - layouts: fakeLayouts, - console: fakeConsole, - results: msgs - }; -} - -function setupTaggedLogging() { - return setupLogging('loggly', { - token: 'your-really-long-input-token', - subdomain: 'your-subdomain', - tags: ['loggly-tag1', 'loggly-tag2', 
'loggly-tagn'] - }); -} - -test('log4js logglyAppender', (batch) => { - batch.test('with minimal config', (t) => { - const setup = setupTaggedLogging(); - setup.logger.log('trace', 'Log event #1', 'Log 2', { tags: ['tag1', 'tag2'] }); - - t.equal(setup.results.length, 1, 'has a results.length of 1'); - t.equal(setup.results[0].msg.msg, 'Log event #1 Log 2', 'has a result msg with both args concatenated'); - t.same(setup.results[0].tags, ['tag1', 'tag2'], 'has the correct result tags'); - t.end(); - }); - - batch.test('config with object with tags and other keys', (t) => { - const setup = setupTaggedLogging(); - // ignore this tags object b/c there are 2 keys - setup.logger.log('trace', 'Log event #1', { other: 'other', tags: ['tag1', 'tag2'] }); - - t.equal(setup.results.length, 1, 'has a results.length of 1'); - t.equal( - setup.results[0].msg.msg, - 'Log event #1 { other: \'other\', tags: [ \'tag1\', \'tag2\' ] }', - 'has a result msg with the args concatenated' - ); - t.same(setup.results[0].tags, [], 'has a result tags with the arg that contains no tags'); - t.end(); - }); - - batch.test('with shutdown callback', (t) => { - const setup = setupTaggedLogging(); - setup.logger.log('trace', 'Log event #1', 'Log 2', { - tags: ['tag1', 'tag2'] - }); - - setup.log4js.shutdown(() => { t.end(); }); - - // shutdown will wait until after the last message has been sent to loggly - setup.results[0].cb(); - }); - - batch.end(); -}); From 28af2158b6a92eff0d1e4439a9a5d003c4f98780 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Tue, 27 Feb 2018 07:14:48 +1100 Subject: [PATCH 21/34] chore: removed loggly from types --- types/log4js.d.ts | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/types/log4js.d.ts b/types/log4js.d.ts index 0ab8916..7d343cd 100644 --- a/types/log4js.d.ts +++ b/types/log4js.d.ts @@ -205,16 +205,6 @@ export interface LogFacesUDPAppender { application?: string; } -export interface LogglyAppender { - type: 'loggly'; - // your really long input token - token: string; - // your subdomain - subdomain: string; - // tags to include in every log message - tags?: string[]; -} - export interface LogLevelFilterAppender { type: 'logLevelFilter'; // the name of an appender, defined in the same configuration, that you want to filter @@ -379,7 +369,6 @@ export type Appender = CategoryFilterAppender | HipchatAppender | LogFacesHTTPAppender | LogFacesUDPAppender - | LogglyAppender | LogLevelFilterAppender | LogstashUDPAppender | MailgunAppender From 5fa965242345c0ca43e4a0be3b4fec0bec305743 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 5 Mar 2018 08:29:31 +1100 Subject: [PATCH 22/34] chore: removed hipchat appender --- lib/appenders/hipchat.js | 87 -------------------- package.json | 1 - test/tap/hipchatAppender-test.js | 136 ------------------------------- 3 files changed, 224 deletions(-) delete mode 100644 lib/appenders/hipchat.js delete mode 100644 test/tap/hipchatAppender-test.js diff --git a/lib/appenders/hipchat.js b/lib/appenders/hipchat.js deleted file mode 100644 index 6e60790..0000000 --- a/lib/appenders/hipchat.js +++ /dev/null @@ -1,87 +0,0 @@ -'use strict'; - -const hipchat = require('hipchat-notifier'); - -/** - @invoke as - - log4js.configure({ - 'appenders': { 'hipchat': - { - 'type' : 'hipchat', - 'hipchat_token': '< User token with Notification Privileges >', - 'hipchat_room': '< Room ID or Name >', - // optionl - 'hipchat_from': '[ additional from label ]', - 'hipchat_notify': '[ notify boolean to bug people ]', - 'hipchat_host' : 'api.hipchat.com' - } - }, - 
categories: { default: { appenders: ['hipchat'], level: 'debug' }} - }); - - var logger = log4js.getLogger('hipchat'); - logger.warn('Test Warn message'); - - @invoke - */ - -function hipchatNotifierResponseCallback(err) { - if (err) { - throw err; - } -} - -function hipchatAppender(config, layout) { - const notifier = hipchat.make(config.hipchat_room, config.hipchat_token); - - return (loggingEvent) => { - let notifierFn; - - notifier.setRoom(config.hipchat_room); - notifier.setFrom(config.hipchat_from || ''); - notifier.setNotify(config.hipchat_notify || false); - - if (config.hipchat_host) { - notifier.setHost(config.hipchat_host); - } - - switch (loggingEvent.level.toString()) { - case 'TRACE': - case 'DEBUG': - notifierFn = 'info'; - break; - case 'WARN': - notifierFn = 'warning'; - break; - case 'ERROR': - case 'FATAL': - notifierFn = 'failure'; - break; - default: - notifierFn = 'success'; - } - - // @TODO, re-work in timezoneOffset ? - const layoutMessage = layout(loggingEvent); - - // dispatch hipchat api request, do not return anything - // [overide hipchatNotifierResponseCallback] - notifier[notifierFn](layoutMessage, config.hipchat_response_callback || - hipchatNotifierResponseCallback); - }; -} - -function hipchatConfigure(config, layouts) { - let layout = layouts.messagePassThroughLayout; - - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - - const appender = hipchatAppender(config, layout); - appender.deprecated = '@log4js-node/hipchat'; - return appender; -} - -module.exports.configure = hipchatConfigure; diff --git a/package.json b/package.json index 097f6de..31addfa 100644 --- a/package.json +++ b/package.json @@ -62,7 +62,6 @@ "validate-commit-msg": "^2.14.0" }, "optionalDependencies": { - "hipchat-notifier": "^1.1.0", "mailgun-js": "^0.7.0", "nodemailer": "^2.5.0", "redis": "^2.7.1", diff --git a/test/tap/hipchatAppender-test.js b/test/tap/hipchatAppender-test.js deleted file mode 100644 index 09f92fd..0000000 --- a/test/tap/hipchatAppender-test.js +++ /dev/null @@ -1,136 +0,0 @@ -'use strict'; - -const test = require('tap').test; -const sandbox = require('@log4js-node/sandboxed-module'); - -function setupLogging(category, options) { - const lastRequest = {}; - - const fakeRequest = function (args, level) { - lastRequest.notifier = this; - lastRequest.body = args[0]; - lastRequest.callback = args[1]; - lastRequest.level = level; - }; - - const fakeHipchatNotifier = { - make: function (room, token, from, host, notify) { - return { - room: room, - token: token, - from: from || '', - host: host || 'api.hipchat.com', - notify: notify || false, - setRoom: function (val) { - this.room = val; - }, - setFrom: function (val) { - this.from = val; - }, - setHost: function (val) { - this.host = val; - }, - setNotify: function (val) { - this.notify = val; - }, - info: function () { - fakeRequest.call(this, arguments, 'info'); - }, - warning: function () { - fakeRequest.call(this, arguments, 'warning'); - }, - failure: function () { - fakeRequest.call(this, arguments, 'failure'); - }, - success: function () { - fakeRequest.call(this, arguments, 'success'); - } - }; - } - }; - - const log4js = sandbox.require('../../lib/log4js', { - requires: { - 'hipchat-notifier': fakeHipchatNotifier - } - }); - - options = options || {}; - options.type = 'hipchat'; - - log4js.configure({ - appenders: { hipchat: options }, - categories: { default: { appenders: ['hipchat'], level: 'debug' } } - }); - - return { - logger: log4js.getLogger(category), - 
lastRequest: lastRequest - }; -} - -test('HipChat appender', (batch) => { - batch.test('when logging to HipChat v2 API', (t) => { - const customCallback = function () { - return 'works'; - }; - - const topic = setupLogging('myCategory', { - type: 'hipchat', - hipchat_token: 'User_Token_With_Notification_Privs', - hipchat_room: 'Room_ID_Or_Name', - hipchat_from: 'Log4js_Test', - hipchat_notify: true, - hipchat_host: 'hipchat.your-company.tld', - hipchat_response_callback: customCallback - }); - topic.logger.warn('Log event #1'); - - t.test('a request to hipchat_host should be sent', (assert) => { - assert.equal(topic.lastRequest.notifier.host, 'hipchat.your-company.tld'); - assert.equal(topic.lastRequest.notifier.notify, true); - assert.equal(topic.lastRequest.body, 'Log event #1'); - assert.equal(topic.lastRequest.level, 'warning'); - assert.end(); - }); - - t.equal(topic.lastRequest.callback(), 'works', 'a custom callback to the HipChat response is supported'); - t.end(); - }); - - batch.test('when missing options', (t) => { - const topic = setupLogging('myLogger', { - type: 'hipchat', - }); - topic.logger.error('Log event #2'); - - t.test('it sets some defaults', (assert) => { - assert.equal(topic.lastRequest.notifier.host, 'api.hipchat.com'); - assert.equal(topic.lastRequest.notifier.notify, false); - assert.equal(topic.lastRequest.body, 'Log event #2'); - assert.equal(topic.lastRequest.level, 'failure'); - assert.end(); - }); - t.end(); - }); - - batch.test('when basicLayout is provided', (t) => { - const topic = setupLogging('myLogger', { - type: 'hipchat', - layout: { type: 'basic' } - }); - topic.logger.debug('Log event #3'); - - t.test('it should include the timestamp', (assert) => { - // basicLayout adds [TIMESTAMP] [LEVEL] category - message - // e.g. [2016-06-10 11:50:53.819] [DEBUG] myLogger - Log event #23 - - assert.match(topic.lastRequest.body, /^\[[^\]]+] \[[^\]]+].*Log event #3$/); - assert.equal(topic.lastRequest.level, 'info'); - assert.end(); - }); - t.end(); - }); - - batch.end(); -}); From 1a591edcfa7e71e2e1cf610940fce73f6a6d3a5a Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 8 Mar 2018 08:09:32 +1100 Subject: [PATCH 23/34] chore: remove logstash udp --- docs/index.md | 2 +- lib/appenders/logstashUDP.js | 111 -------------- test/tap/logstashUDP-test.js | 271 ----------------------------------- types/log4js.d.ts | 17 --- 4 files changed, 1 insertion(+), 400 deletions(-) delete mode 100755 lib/appenders/logstashUDP.js delete mode 100644 test/tap/logstashUDP-test.js diff --git a/docs/index.md b/docs/index.md index c2826b8..c183b62 100644 --- a/docs/index.md +++ b/docs/index.md @@ -14,7 +14,7 @@ There have been a few changes between log4js 1.x and 2.x (and 0.x too). 
You shou * [SMTP appender](smtp.md) * [GELF appender](https://github.com/log4js-node/gelf) * [Loggly appender](https://github.com/log4js-node/loggly) -* [Logstash UDP appender](logstashUDP.md) +* [Logstash UDP appender](https://github.com/log4js-node/logstashUDP) * logFaces ([UDP](logFaces-UDP.md) and [HTTP](logFaces-HTTP.md)) appender * [TCP appender](tcp.md) (useful when you've got multiple servers but want to centralise logging) * a [logger for connect/express](connect-logger.md) servers diff --git a/lib/appenders/logstashUDP.js b/lib/appenders/logstashUDP.js deleted file mode 100755 index aefbca3..0000000 --- a/lib/appenders/logstashUDP.js +++ /dev/null @@ -1,111 +0,0 @@ -'use strict'; - -const dgram = require('dgram'); -const util = require('util'); - -function sendLog(udp, host, port, logObject) { - const buffer = Buffer.from(JSON.stringify(logObject)); - - /* eslint no-unused-vars:0 */ - udp.send(buffer, 0, buffer.length, port, host, (err, bytes) => { - if (err) { - console.error('log4js.logstashUDP - %s:%p Error: %s', host, port, util.inspect(err)); - } - }); -} - - -function logstashUDP(config, layout) { - const udp = dgram.createSocket('udp4'); - const type = config.logType ? config.logType : config.category; - - if (!config.fields) { - config.fields = {}; - } - - function checkArgs(argsValue, logUnderFields) { - if ((!argsValue) || (argsValue === 'both')) { - return true; - } - - if (logUnderFields && (argsValue === 'fields')) { - return true; - } - - if ((!logUnderFields) && (argsValue === 'direct')) { - return true; - } - - return false; - } - - function log(loggingEvent) { - /* - https://gist.github.com/jordansissel/2996677 - { - 'message' => 'hello world', - '@version' => '1', - '@timestamp' => '2014-04-22T23:03:14.111Z', - 'type' => 'stdin', - 'host' => 'hello.local' - } - @timestamp is the ISO8601 high-precision timestamp for the event. - @version is the version number of this json schema - Every other field is valid and fine. - */ - - const fields = {}; - Object.keys(config.fields).forEach((key) => { - fields[key] = typeof config.fields[key] === 'function' ? 
config.fields[key](loggingEvent) : config.fields[key]; - }); - - /* eslint no-prototype-builtins:1,no-restricted-syntax:[1, "ForInStatement"] */ - if (loggingEvent.data.length > 1) { - const secondEvData = loggingEvent.data[1]; - if ((secondEvData !== undefined) && (secondEvData !== null)) { - Object.keys(secondEvData).forEach((key) => { - fields[key] = secondEvData[key]; - }); - } - } - fields.level = loggingEvent.level.levelStr; - fields.category = loggingEvent.categoryName; - - const logObject = { - '@version': '1', - '@timestamp': (new Date(loggingEvent.startTime)).toISOString(), - type: type, - message: layout(loggingEvent) - }; - - if (checkArgs(config.args, true)) { - logObject.fields = fields; - } - - if (checkArgs(config.args, false)) { - Object.keys(fields).forEach((key) => { - logObject[key] = fields[key]; - }); - } - - sendLog(udp, config.host, config.port, logObject); - } - - log.shutdown = function (cb) { - udp.close(cb); - }; - - log.deprecated = '@log4js-node/logstashudp'; - return log; -} - -function configure(config, layouts) { - let layout = layouts.dummyLayout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - - return logstashUDP(config, layout); -} - -module.exports.configure = configure; diff --git a/test/tap/logstashUDP-test.js b/test/tap/logstashUDP-test.js deleted file mode 100644 index ccd9b65..0000000 --- a/test/tap/logstashUDP-test.js +++ /dev/null @@ -1,271 +0,0 @@ -'use strict'; - -const test = require('tap').test; -const sandbox = require('@log4js-node/sandboxed-module'); - -function setupLogging(category, options) { - const udpSent = {}; - const socket = { closed: false }; - - const fakeDgram = { - createSocket: function () { - return { - send: function (buffer, offset, length, port, host, callback) { - udpSent.date = new Date(); - udpSent.host = host; - udpSent.port = port; - udpSent.length = length; - udpSent.offset = 0; - udpSent.buffer = buffer; - callback(undefined, length); - }, - close: function (cb) { - socket.closed = true; - cb(); - } - }; - } - }; - - const log4js = sandbox.require('../../lib/log4js', { - requires: { - dgram: fakeDgram - } - }); - - options = options || {}; - options.type = 'logstashUDP'; - log4js.configure({ - appenders: { logstash: options }, - categories: { default: { appenders: ['logstash'], level: 'trace' } } - }); - - return { - logger: log4js.getLogger(category), - log4js: log4js, - results: udpSent, - socket: socket - }; -} - -test('logstashUDP appender', (batch) => { - batch.test('a UDP packet should be sent', (t) => { - const setup = setupLogging('myCategory', { - host: '127.0.0.1', - port: 10001, - type: 'logstashUDP', - logType: 'myAppType', - category: 'myLogger', - fields: { - field1: 'value1', - field2: 'value2' - }, - layout: { - type: 'pattern', - pattern: '%m' - } - }); - setup.logger.log('trace', 'Log event #1'); - - t.equal(setup.results.host, '127.0.0.1'); - t.equal(setup.results.port, 10001); - t.equal(setup.results.offset, 0); - - const json = JSON.parse(setup.results.buffer.toString()); - t.equal(json.type, 'myAppType'); - const fields = { - field1: 'value1', - field2: 'value2', - level: 'TRACE', - category: 'myCategory' - }; - - const keys = Object.keys(fields); - for (let i = 0, length = keys.length; i < length; i += 1) { - t.equal(json[keys[i]], fields[keys[i]]); - } - - t.equal(JSON.stringify(json.fields), JSON.stringify(fields)); - t.equal(json.message, 'Log event #1'); - // Assert timestamp, up to hours resolution. 
- const date = new Date(json['@timestamp']); - t.equal( - date.toISOString().substring(0, 14), - setup.results.date.toISOString().substring(0, 14) - ); - - t.end(); - }); - - batch.test('default options', (t) => { - const setup = setupLogging('myLogger', { - host: '127.0.0.1', - port: 10001, - type: 'logstashUDP', - category: 'myLogger', - layout: { - type: 'pattern', - pattern: '%m' - } - }); - setup.logger.log('trace', 'Log event #1'); - - const json = JSON.parse(setup.results.buffer.toString()); - t.equal(json.type, 'myLogger'); - t.equal( - JSON.stringify(json.fields), - JSON.stringify({ level: 'TRACE', category: 'myLogger' }) - ); - - t.end(); - }); - - batch.test('configuration can include functions to generate field values at run-time', (t) => { - const setup = setupLogging('myCategory', { - host: '127.0.0.1', - port: 10001, - type: 'logstashUDP', - logType: 'myAppType', - category: 'myLogger', - fields: { - field1: 'value1', - field2: function () { - return 'evaluated at runtime'; - } - }, - layout: { - type: 'pattern', - pattern: '%m' - } - }); - setup.logger.log('trace', 'Log event #1'); - - const json = JSON.parse(setup.results.buffer.toString()); - t.equal(json.fields.field1, 'value1'); - t.equal(json.fields.field2, 'evaluated at runtime'); - - t.end(); - }); - - batch.test('extra fields should be added to the fields structure', (t) => { - const setup = setupLogging('myLogger', { - host: '127.0.0.1', - port: 10001, - type: 'logstashUDP', - category: 'myLogger', - layout: { - type: 'dummy' - } - }); - setup.logger.log('trace', 'Log event #1', { extra1: 'value1', extra2: 'value2' }); - - const json = JSON.parse(setup.results.buffer.toString()); - const fields = { - extra1: 'value1', - extra2: 'value2', - level: 'TRACE', - category: 'myLogger' - }; - t.equal(JSON.stringify(json.fields), JSON.stringify(fields)); - t.end(); - }); - - batch.test('use direct args', (t) => { - const setup = setupLogging('myLogger', { - host: '127.0.0.1', - port: 10001, - type: 'logstashUDP', - category: 'myLogger', - args: 'direct', - layout: { - type: 'dummy' - } - }); - - setup.logger.log('info', 'Log event with fields', { extra1: 'value1', extra2: 'value2' }); - const json = JSON.parse(setup.results.buffer.toString()); - - t.equal(json.extra1, 'value1'); - t.equal(json.extra2, 'value2'); - t.equal(json.fields, undefined); - t.end(); - }); - - batch.test('use fields args', (t) => { - const setup = setupLogging('myLogger', { - host: '127.0.0.1', - port: 10001, - type: 'logstashUDP', - category: 'myLogger', - args: 'fields', - layout: { - type: 'dummy' - } - }); - - setup.logger.log('info', 'Log event with fields', { extra1: 'value1', extra2: 'value2' }); - const json = JSON.parse(setup.results.buffer.toString()); - - t.equal(json.extra1, undefined); - t.equal(json.extra2, undefined); - t.equal(json.fields.extra1, 'value1'); - t.equal(json.fields.extra2, 'value2'); - t.end(); - }); - - batch.test('Send null as argument', (t) => { - const setup = setupLogging('myLogger', { - host: '127.0.0.1', - port: 10001, - type: 'logstashUDP', - category: 'myLogger', - layout: { - type: 'dummy' - } - }); - - const msg = 'test message with null'; - setup.logger.info(msg, null); - const json = JSON.parse(setup.results.buffer.toString()); - - t.equal(json.message, msg); - t.end(); - }); - - batch.test('Send undefined as argument', (t) => { - const setup = setupLogging('myLogger', { - host: '127.0.0.1', - port: 10001, - type: 'logstashUDP', - category: 'myLogger', - layout: { - type: 'dummy' - } - }); - - const msg = 
'test message with undefined'; - setup.logger.info(msg, undefined); - const json = JSON.parse(setup.results.buffer.toString()); - - t.equal(json.message, msg); - t.end(); - }); - - batch.test('shutdown should close sockets', (t) => { - const setup = setupLogging('myLogger', { - host: '127.0.0.1', - port: 10001, - type: 'logstashUDP', - category: 'myLogger', - layout: { - type: 'dummy' - } - }); - setup.log4js.shutdown(() => { - t.ok(setup.socket.closed); - t.end(); - }); - }); - - batch.end(); -}); diff --git a/types/log4js.d.ts b/types/log4js.d.ts index 7d343cd..42df445 100644 --- a/types/log4js.d.ts +++ b/types/log4js.d.ts @@ -215,22 +215,6 @@ export interface LogLevelFilterAppender { maxLevel?: string; } -export interface LogstashUDPAppender { - type: 'logstashUDP'; - // hostname (or IP-address) of the logstash server - host: string; - // port of the logstash server - port: number; - // used for the type field in the logstash data - logType?: string; - // used for the type field of the logstash data if logType is not defined - category?: string; - // extra fields to log with each event - fields?: { [fieldname: string]: any }; - // (defaults to dummyLayout) used for the message field of the logstash data - layout?: Layout; -} - export interface MailgunAppender { type: 'mailgun'; // your mailgun API key @@ -370,7 +354,6 @@ export type Appender = CategoryFilterAppender | LogFacesHTTPAppender | LogFacesUDPAppender | LogLevelFilterAppender - | LogstashUDPAppender | MailgunAppender | MultiFileAppender | MultiprocessAppender From 006fd907937a7394a932b90c4ecd1b2b3b54d758 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 20 Apr 2018 07:36:35 +1000 Subject: [PATCH 24/34] chore: remove mailgun appender --- lib/appenders/mailgun.js | 44 -------- package.json | 1 - test/tap/mailgunAppender-test.js | 183 ------------------------------- types/log4js.d.ts | 14 --- 4 files changed, 242 deletions(-) delete mode 100644 lib/appenders/mailgun.js delete mode 100644 test/tap/mailgunAppender-test.js diff --git a/lib/appenders/mailgun.js b/lib/appenders/mailgun.js deleted file mode 100644 index 5fe2c17..0000000 --- a/lib/appenders/mailgun.js +++ /dev/null @@ -1,44 +0,0 @@ -'use strict'; - -/** - * This appender has been deprecated. - * Updates and bug fixes should be made against https://github.com/log4js-node/mailgun - */ -const mailgunFactory = require('mailgun-js'); - -function mailgunAppender(config, layout) { - const mailgun = mailgunFactory({ - apiKey: config.apikey, - domain: config.domain - }); - - const appender = (loggingEvent) => { - const data = { - from: config.from, - to: config.to, - subject: config.subject, - text: layout(loggingEvent, config.timezoneOffset) - }; - - /* eslint no-unused-vars:0 */ - mailgun.messages().send(data, (error, body) => { - if (error !== null) console.error('log4js.mailgunAppender - Error happened', error); - }); - }; - - // trigger a deprecation warning. 
- appender.deprecated = '@logj4s-node/mailgun'; - - return appender; -} - -function configure(config, layouts) { - let layout = layouts.basicLayout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - - return mailgunAppender(config, layout); -} - -module.exports.configure = configure; diff --git a/package.json b/package.json index 31addfa..4b3fd6d 100644 --- a/package.json +++ b/package.json @@ -62,7 +62,6 @@ "validate-commit-msg": "^2.14.0" }, "optionalDependencies": { - "mailgun-js": "^0.7.0", "nodemailer": "^2.5.0", "redis": "^2.7.1", "slack-node": "~0.2.0", diff --git a/test/tap/mailgunAppender-test.js b/test/tap/mailgunAppender-test.js deleted file mode 100644 index 40be170..0000000 --- a/test/tap/mailgunAppender-test.js +++ /dev/null @@ -1,183 +0,0 @@ -'use strict'; - -const test = require('tap').test; -const layouts = require('../../lib/layouts'); -const sandbox = require('@log4js-node/sandboxed-module'); - -function setupLogging(category, options) { - const msgs = []; - - const mailgunCredentials = { - apiKey: options.apikey, - domain: options.domain - }; - - const fakeMailgun = function () { - return { - messages: function () { - return { - config: options, - send: function (data, callback) { - msgs.push(data); - callback(false, { status: 'OK' }); - } - }; - } - }; - }; - - const fakeLayouts = { - layout: function (type, config) { - this.type = type; - this.config = config; - return layouts.messagePassThroughLayout; - }, - basicLayout: layouts.basicLayout, - messagePassThroughLayout: layouts.messagePassThroughLayout - }; - - const fakeConsole = { - errors: [], - logs: [], - error: function (msg, value) { - this.errors.push({ msg: msg, value: value }); - }, - log: function (msg, value) { - this.logs.push({ msg: msg, value: value }); - } - }; - - const log4js = sandbox.require('../../lib/log4js', { - requires: { - 'mailgun-js': fakeMailgun, - './layouts': fakeLayouts - }, - globals: { - console: fakeConsole - } - }); - options = options || {}; - options.type = 'mailgun'; - log4js.configure({ - appenders: { mailgun: options }, - categories: { default: { appenders: ['mailgun'], level: 'trace' } } - }); - - return { - logger: log4js.getLogger(category), - mailer: fakeMailgun, - layouts: fakeLayouts, - console: fakeConsole, - mails: msgs, - credentials: mailgunCredentials - }; -} - -function checkMessages(assert, result) { - for (let i = 0; i < result.mails.length; ++i) { - assert.equal(result.mails[i].from, 'sender@domain.com'); - assert.equal(result.mails[i].to, 'recepient@domain.com'); - assert.equal(result.mails[i].subject, 'This is subject'); - assert.ok(new RegExp(`.+Log event #${i + 1}`).test(result.mails[i].text)); - } -} - -test('log4js mailgunAppender', (batch) => { - batch.test('mailgun setup', (t) => { - const result = setupLogging('mailgun setup', { - apikey: 'APIKEY', - domain: 'DOMAIN', - from: 'sender@domain.com', - to: 'recepient@domain.com', - subject: 'This is subject' - }); - - t.test('mailgun credentials should match', (assert) => { - assert.equal(result.credentials.apiKey, 'APIKEY'); - assert.equal(result.credentials.domain, 'DOMAIN'); - assert.end(); - }); - t.end(); - }); - - batch.test('basic usage', (t) => { - const result = setupLogging('basic usage', { - apikey: 'APIKEY', - domain: 'DOMAIN', - from: 'sender@domain.com', - to: 'recepient@domain.com', - subject: 'This is subject' - }); - - result.logger.info('Log event #1'); - - t.equal(result.mails.length, 1, 'should be one message only'); - checkMessages(t, result); - 
t.end(); - }); - - batch.test('config with layout', (t) => { - const result = setupLogging('config with layout', { - layout: { - type: 'tester' - } - }); - t.equal(result.layouts.type, 'tester', 'should configure layout'); - t.end(); - }); - - batch.test('error when sending email', (t) => { - const setup = setupLogging('separate email for each event', { - apikey: 'APIKEY', - domain: 'DOMAIN', - from: 'sender@domain.com', - to: 'recepient@domain.com', - subject: 'This is subject' - }); - - setup.mailer.messages = function () { - return { - send: function (msg, cb) { - cb({ msg: 'log4js.mailgunAppender - Error happened' }, null); - } - }; - }; - - setup.logger.info('This will break'); - const cons = setup.console; - - t.test('should be logged to console', (assert) => { - assert.equal(cons.errors.length, 2); - // errors[0] is the deprecation warning - assert.equal(cons.errors[1].msg, 'log4js.mailgunAppender - Error happened'); - assert.end(); - }); - t.end(); - }); - - batch.test('separate email for each event', (t) => { - const setup = setupLogging('separate email for each event', { - apikey: 'APIKEY', - domain: 'DOMAIN', - from: 'sender@domain.com', - to: 'recepient@domain.com', - subject: 'This is subject' - }); - setTimeout(() => { - setup.logger.info('Log event #1'); - }, 0); - setTimeout(() => { - setup.logger.info('Log event #2'); - }, 500); - setTimeout(() => { - setup.logger.info('Log event #3'); - }, 1100); - setTimeout(() => { - t.equal(setup.mails.length, 3, 'should be three messages'); - checkMessages(t, setup); - t.end(); - }, 3000); - }); - - batch.end(); -}); diff --git a/types/log4js.d.ts b/types/log4js.d.ts index 42df445..855434b 100644 --- a/types/log4js.d.ts +++ b/types/log4js.d.ts @@ -215,19 +215,6 @@ export interface LogLevelFilterAppender { maxLevel?: string; } -export interface MailgunAppender { - type: 'mailgun'; - // your mailgun API key - apiKey: string; - // your domain - domain: string; - from: string; - to: string; - subject: string; - // (defaults to basicLayout) - layout?: Layout; -} - export interface MultiFileAppender { type: 'multiFile'; // the base part of the generated log filename @@ -354,7 +341,6 @@ export type Appender = CategoryFilterAppender | LogFacesHTTPAppender | LogFacesUDPAppender | LogLevelFilterAppender - | MailgunAppender | MultiFileAppender | MultiprocessAppender | RedisAppender From 633c7108271306358596d73682e5913782d8577a Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 31 May 2018 08:01:40 +1000 Subject: [PATCH 25/34] fix: removed SMTP appender --- examples/smtp-appender.js | 2 +- lib/appenders/smtp.js | 148 ----------------- package-lock.json | 300 ---------------------------------- package.json | 4 +- test/tap/smtpAppender-test.js | 280 ------------------------------- 5 files changed, 2 insertions(+), 732 deletions(-) delete mode 100644 lib/appenders/smtp.js delete mode 100644 test/tap/smtpAppender-test.js diff --git a/examples/smtp-appender.js b/examples/smtp-appender.js index cf6f520..3274516 100644 --- a/examples/smtp-appender.js +++ b/examples/smtp-appender.js @@ -9,7 +9,7 @@ log4js.configure({ type: 'console' }, mail: { - type: 'smtp', + type: '@log4js-node/smtp', recipients: 'logfilerecipient@logging.com', sendInterval: 5, transport: 'SMTP', diff --git a/lib/appenders/smtp.js b/lib/appenders/smtp.js deleted file mode 100644 index d2fe8f4..0000000 --- a/lib/appenders/smtp.js +++ /dev/null @@ -1,148 +0,0 @@ -'use strict'; - -/** - * This appender has been deprecated. 
- * Updates and bug fixes should be made against https://github.com/log4js-node/smtp - */ - -const mailer = require('nodemailer'); -const os = require('os'); - -/** - * SMTP Appender. Sends logging events using SMTP protocol. - * It can either send an email on each event or group several - * logging events gathered during specified interval. - * - * @param _config appender configuration data - * config.sendInterval time between log emails (in seconds), if 0 - * then every event sends an email - * config.shutdownTimeout time to give up remaining emails (in seconds; defaults to 5). - * @param _layout a function that takes a logevent and returns a string (defaults to basicLayout). - */ -function smtpAppender(config, layout, subjectLayout) { - if (!config.attachment) { - config.attachment = {}; - } - - config.attachment.enable = !!config.attachment.enable; - config.attachment.message = config.attachment.message || 'See logs as attachment'; - config.attachment.filename = config.attachment.filename || 'default.log'; - - const sendInterval = config.sendInterval * 1000 || 0; - const shutdownTimeout = ('shutdownTimeout' in config ? config.shutdownTimeout : 5) * 1000; - const transport = mailer.createTransport(getTransportOptions()); - const logEventBuffer = []; - - let unsentCount = 0; - let sendTimer; - - function sendBuffer() { - if (logEventBuffer.length > 0) { - const firstEvent = logEventBuffer[0]; - let body = ''; - const count = logEventBuffer.length; - while (logEventBuffer.length > 0) { - body += `${layout(logEventBuffer.shift(), config.timezoneOffset)}\n`; - } - - const msg = { - to: config.recipients, - subject: config.subject || subjectLayout(firstEvent), - headers: { Hostname: os.hostname() } - }; - - if (config.attachment.enable === true) { - msg[config.html ? 'html' : 'text'] = config.attachment.message; - msg.attachments = [ - { - filename: config.attachment.filename, - contentType: 'text/x-log', - content: body - } - ]; - } else { - msg[config.html ? 'html' : 'text'] = body; - } - - if (config.sender) { - msg.from = config.sender; - } - transport.sendMail(msg, (error) => { - if (error) { - console.error('log4js.smtpAppender - Error happened', error); - } - transport.close(); - unsentCount -= count; - }); - } - } - - function getTransportOptions() { - let options = null; - if (config.SMTP) { - options = config.SMTP; - } else if (config.transport) { - options = config.transport.options || {}; - options.transport = config.transport.plugin || 'smtp'; - } - return options; - } - - function scheduleSend() { - if (!sendTimer) { - sendTimer = setTimeout(() => { - sendTimer = null; - sendBuffer(); - }, sendInterval); - } - } - - function shutdown(cb) { - if (shutdownTimeout > 0) { - setTimeout(() => { - if (sendTimer) { - clearTimeout(sendTimer); - } - - sendBuffer(); - }, shutdownTimeout); - } - - (function checkDone() { - if (unsentCount > 0) { - setTimeout(checkDone, 100); - } else { - cb(); - } - }()); - } - - const appender = (loggingEvent) => { - unsentCount++; // eslint-disable-line no-plusplus - logEventBuffer.push(loggingEvent); - if (sendInterval > 0) { - scheduleSend(); - } else { - sendBuffer(); - } - }; - - appender.shutdown = shutdown; - - // trigger a deprecation warning. 
- appender.deprecated = '@logj4s-node/smtp'; - - return appender; -} - -function configure(config, layouts) { - const subjectLayout = layouts.messagePassThroughLayout; - let layout = layouts.basicLayout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - return smtpAppender(config, layout, subjectLayout); -} - - -module.exports.configure = configure; diff --git a/package-lock.json b/package-lock.json index 5f529a9..cd167a3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2317,306 +2317,6 @@ "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", "dev": true }, - "nodemailer": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-2.7.2.tgz", - "integrity": "sha1-8kLmSa7q45tsftdA73sGHEBNMPk=", - "optional": true, - "requires": { - "libmime": "3.0.0", - "mailcomposer": "4.0.1", - "nodemailer-direct-transport": "3.3.2", - "nodemailer-shared": "1.1.0", - "nodemailer-smtp-pool": "2.8.2", - "nodemailer-smtp-transport": "2.7.2", - "socks": "1.1.9" - }, - "dependencies": { - "libmime": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/libmime/-/libmime-3.0.0.tgz", - "integrity": "sha1-UaGp50SOy9Ms2lRCFnW7IbwJPaY=", - "requires": { - "iconv-lite": "0.4.15", - "libbase64": "0.1.0", - "libqp": "1.1.0" - }, - "dependencies": { - "iconv-lite": { - "version": "0.4.15", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.15.tgz", - "integrity": "sha1-/iZaIYrGpXz+hUkn6dBMGYJe3es=" - }, - "libbase64": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/libbase64/-/libbase64-0.1.0.tgz", - "integrity": "sha1-YjUag5VjrF/1vSbxL2Dpgwu3UeY=" - }, - "libqp": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/libqp/-/libqp-1.1.0.tgz", - "integrity": "sha1-9ebgatdLeU+1tbZpiL9yjvHe2+g=" - } - } - }, - "mailcomposer": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/mailcomposer/-/mailcomposer-4.0.1.tgz", - "integrity": "sha1-DhxEsqB890DuF9wUm6AJ8Zyt/rQ=", - "optional": true, - "requires": { - "buildmail": "4.0.1", - "libmime": "3.0.0" - }, - "dependencies": { - "buildmail": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/buildmail/-/buildmail-4.0.1.tgz", - "integrity": "sha1-h393OLeHKYccmhBeO4N9K+EaenI=", - "optional": true, - "requires": { - "addressparser": "1.0.1", - "libbase64": "0.1.0", - "libmime": "3.0.0", - "libqp": "1.1.0", - "nodemailer-fetch": "1.6.0", - "nodemailer-shared": "1.1.0", - "punycode": "1.4.1" - }, - "dependencies": { - "addressparser": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/addressparser/-/addressparser-1.0.1.tgz", - "integrity": "sha1-R6++GiqSYhkdtoOOT9HTm0CCF0Y=", - "optional": true - }, - "libbase64": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/libbase64/-/libbase64-0.1.0.tgz", - "integrity": "sha1-YjUag5VjrF/1vSbxL2Dpgwu3UeY=", - "optional": true - }, - "libqp": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/libqp/-/libqp-1.1.0.tgz", - "integrity": "sha1-9ebgatdLeU+1tbZpiL9yjvHe2+g=", - "optional": true - }, - "nodemailer-fetch": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/nodemailer-fetch/-/nodemailer-fetch-1.6.0.tgz", - "integrity": "sha1-ecSQihwPXzdbc/6IjamCj23JY6Q=", - "optional": true - }, - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", - "optional": true - } - } - } - } - }, - "nodemailer-direct-transport": 
{ - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/nodemailer-direct-transport/-/nodemailer-direct-transport-3.3.2.tgz", - "integrity": "sha1-6W+vuQNYVglH5WkBfZfmBzilCoY=", - "optional": true, - "requires": { - "nodemailer-shared": "1.1.0", - "smtp-connection": "2.12.0" - }, - "dependencies": { - "smtp-connection": { - "version": "2.12.0", - "resolved": "https://registry.npmjs.org/smtp-connection/-/smtp-connection-2.12.0.tgz", - "integrity": "sha1-1275EnyyPCJZ7bHoNJwujV4tdME=", - "optional": true, - "requires": { - "httpntlm": "1.6.1", - "nodemailer-shared": "1.1.0" - }, - "dependencies": { - "httpntlm": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/httpntlm/-/httpntlm-1.6.1.tgz", - "integrity": "sha1-rQFScUOi6Hc8+uapb1hla7UqNLI=", - "optional": true, - "requires": { - "httpreq": ">=0.4.22", - "underscore": "~1.7.0" - }, - "dependencies": { - "httpreq": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/httpreq/-/httpreq-0.4.24.tgz", - "integrity": "sha1-QzX/2CzZaWaKOUZckprGHWOTYn8=", - "optional": true - }, - "underscore": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", - "integrity": "sha1-a7rwh3UA02vjTsqlhODbn+8DUgk=", - "optional": true - } - } - } - } - } - } - }, - "nodemailer-shared": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/nodemailer-shared/-/nodemailer-shared-1.1.0.tgz", - "integrity": "sha1-z1mU4v0mjQD1zw+nZ6CBae2wfsA=", - "requires": { - "nodemailer-fetch": "1.6.0" - }, - "dependencies": { - "nodemailer-fetch": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/nodemailer-fetch/-/nodemailer-fetch-1.6.0.tgz", - "integrity": "sha1-ecSQihwPXzdbc/6IjamCj23JY6Q=" - } - } - }, - "nodemailer-smtp-pool": { - "version": "2.8.2", - "resolved": "https://registry.npmjs.org/nodemailer-smtp-pool/-/nodemailer-smtp-pool-2.8.2.tgz", - "integrity": "sha1-LrlNbPhXgLG0clzoU7nL1ejajHI=", - "optional": true, - "requires": { - "nodemailer-shared": "1.1.0", - "nodemailer-wellknown": "0.1.10", - "smtp-connection": "2.12.0" - }, - "dependencies": { - "nodemailer-wellknown": { - "version": "0.1.10", - "resolved": "https://registry.npmjs.org/nodemailer-wellknown/-/nodemailer-wellknown-0.1.10.tgz", - "integrity": "sha1-WG24EB2zDLRDjrVGc3pBqtDPE9U=", - "optional": true - }, - "smtp-connection": { - "version": "2.12.0", - "resolved": "https://registry.npmjs.org/smtp-connection/-/smtp-connection-2.12.0.tgz", - "integrity": "sha1-1275EnyyPCJZ7bHoNJwujV4tdME=", - "optional": true, - "requires": { - "httpntlm": "1.6.1", - "nodemailer-shared": "1.1.0" - }, - "dependencies": { - "httpntlm": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/httpntlm/-/httpntlm-1.6.1.tgz", - "integrity": "sha1-rQFScUOi6Hc8+uapb1hla7UqNLI=", - "optional": true, - "requires": { - "httpreq": ">=0.4.22", - "underscore": "~1.7.0" - }, - "dependencies": { - "httpreq": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/httpreq/-/httpreq-0.4.24.tgz", - "integrity": "sha1-QzX/2CzZaWaKOUZckprGHWOTYn8=", - "optional": true - }, - "underscore": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", - "integrity": "sha1-a7rwh3UA02vjTsqlhODbn+8DUgk=", - "optional": true - } - } - } - } - } - } - }, - "nodemailer-smtp-transport": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/nodemailer-smtp-transport/-/nodemailer-smtp-transport-2.7.2.tgz", - "integrity": "sha1-A9ccdjFPFKx9vHvwM6am0W1n+3c=", - "optional": 
true, - "requires": { - "nodemailer-shared": "1.1.0", - "nodemailer-wellknown": "0.1.10", - "smtp-connection": "2.12.0" - }, - "dependencies": { - "nodemailer-wellknown": { - "version": "0.1.10", - "resolved": "https://registry.npmjs.org/nodemailer-wellknown/-/nodemailer-wellknown-0.1.10.tgz", - "integrity": "sha1-WG24EB2zDLRDjrVGc3pBqtDPE9U=", - "optional": true - }, - "smtp-connection": { - "version": "2.12.0", - "resolved": "https://registry.npmjs.org/smtp-connection/-/smtp-connection-2.12.0.tgz", - "integrity": "sha1-1275EnyyPCJZ7bHoNJwujV4tdME=", - "optional": true, - "requires": { - "httpntlm": "1.6.1", - "nodemailer-shared": "1.1.0" - }, - "dependencies": { - "httpntlm": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/httpntlm/-/httpntlm-1.6.1.tgz", - "integrity": "sha1-rQFScUOi6Hc8+uapb1hla7UqNLI=", - "optional": true, - "requires": { - "httpreq": ">=0.4.22", - "underscore": "~1.7.0" - }, - "dependencies": { - "httpreq": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/httpreq/-/httpreq-0.4.24.tgz", - "integrity": "sha1-QzX/2CzZaWaKOUZckprGHWOTYn8=", - "optional": true - }, - "underscore": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", - "integrity": "sha1-a7rwh3UA02vjTsqlhODbn+8DUgk=", - "optional": true - } - } - } - } - } - } - }, - "socks": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/socks/-/socks-1.1.9.tgz", - "integrity": "sha1-Yo1+TQSRJDVEWsC25Fk3bLPm1pE=", - "optional": true, - "requires": { - "ip": "^1.1.2", - "smart-buffer": "^1.0.4" - }, - "dependencies": { - "ip": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.5.tgz", - "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=", - "optional": true - }, - "smart-buffer": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-1.1.15.tgz", - "integrity": "sha1-fxFLW2X6s+KjWqd1uxLw0cZJvxY=", - "optional": true - } - } - } - } - }, "normalize-package-data": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.4.0.tgz", diff --git a/package.json b/package.json index ebfdb1b..292ba5c 100644 --- a/package.json +++ b/package.json @@ -33,9 +33,8 @@ "commitmsg": "validate-commit-msg", "posttest": "npm run clean", "pretest": "eslint 'lib/**/*.js' 'test/**/*.js'", - "test": "tap 'test/tap/**/*.js'", + "test": "tap 'test/tap/**/*.js' --cov", "typings": "tsc -p types/tsconfig.json", - "coverage": "tap 'test/tap/**/*.js' --cov", "codecov": "tap 'test/tap/**/*.js' --cov --coverage-report=lcov && codecov" }, "directories": { @@ -64,7 +63,6 @@ "validate-commit-msg": "^2.14.0" }, "optionalDependencies": { - "nodemailer": "^2.5.0", "redis": "^2.7.1", "slack-node": "~0.2.0", "axios": "^0.15.3", diff --git a/test/tap/smtpAppender-test.js b/test/tap/smtpAppender-test.js deleted file mode 100644 index 4aad5df..0000000 --- a/test/tap/smtpAppender-test.js +++ /dev/null @@ -1,280 +0,0 @@ -'use strict'; - -const test = require('tap').test; -const realLayouts = require('../../lib/layouts'); -const sandbox = require('@log4js-node/sandboxed-module'); - -function setupLogging(category, options, errorOnSend) { - const msgs = []; - - const fakeMailer = { - createTransport: function (name, opts) { - return { - config: opts, - sendMail: function (msg, callback) { - if (errorOnSend) { - callback({ message: errorOnSend }); - return; - } - msgs.push(msg); - callback(null, true); - }, - close: function () { - } - }; - } - }; - - 
const fakeLayouts = { - layout: function (type, config) { - this.type = type; - this.config = config; - return realLayouts.messagePassThroughLayout; - }, - basicLayout: realLayouts.basicLayout, - messagePassThroughLayout: realLayouts.messagePassThroughLayout - }; - - const fakeConsole = { - log: () => {}, - errors: [], - error: function (msg, value) { - this.errors.push({ msg: msg, value: value }); - } - }; - - const log4js = sandbox.require('../../lib/log4js', { - requires: { - nodemailer: fakeMailer, - './layouts': fakeLayouts - }, - globals: { - console: fakeConsole - } - }); - - options.type = 'smtp'; - log4js.configure({ - appenders: { - smtp: options - }, - categories: { default: { appenders: ['smtp'], level: 'trace' } } - }); - - return { - logger: log4js.getLogger(category), - mailer: fakeMailer, - layouts: fakeLayouts, - console: fakeConsole, - results: msgs - }; -} - -function checkMessages(assert, result, sender, subject) { - for (let i = 0; i < result.results.length; ++i) { - assert.equal(result.results[i].from, sender); - assert.equal(result.results[i].to, 'recipient@domain.com'); - assert.equal(result.results[i].subject, subject ? subject : `Log event #${i + 1}`); // eslint-disable-line - assert.ok(new RegExp(`.+Log event #${i + 1}\n$`).test(result.results[i].text)); - } -} - -test('log4js smtpAppender', (batch) => { - batch.test('minimal config', (t) => { - const setup = setupLogging('minimal config', { - recipients: 'recipient@domain.com', - SMTP: { - port: 25, - auth: { - user: 'user@domain.com' - } - } - }); - setup.logger.info('Log event #1'); - - t.equal(setup.results.length, 1, 'should be one message only'); - checkMessages(t, setup); - t.end(); - }); - - batch.test('fancy config', (t) => { - const setup = setupLogging('fancy config', { - recipients: 'recipient@domain.com', - sender: 'sender@domain.com', - subject: 'This is subject', - SMTP: { - port: 25, - auth: { - user: 'user@domain.com' - } - } - }); - setup.logger.info('Log event #1'); - - t.equal(setup.results.length, 1, 'should be one message only'); - checkMessages(t, setup, 'sender@domain.com', 'This is subject'); - t.end(); - }); - - batch.test('config with layout', (t) => { - const setup = setupLogging('config with layout', { - layout: { - type: 'tester' - } - }); - t.equal(setup.layouts.type, 'tester', 'should configure layout'); - t.end(); - }); - - batch.test('separate email for each event', (t) => { - const setup = setupLogging('separate email for each event', { - recipients: 'recipient@domain.com', - SMTP: { - port: 25, - auth: { - user: 'user@domain.com' - } - } - }); - setTimeout(() => { - setup.logger.info('Log event #1'); - }, 0); - setTimeout(() => { - setup.logger.info('Log event #2'); - }, 500); - setTimeout(() => { - setup.logger.info('Log event #3'); - }, 1100); - setTimeout(() => { - t.equal(setup.results.length, 3, 'there should be three messages'); - checkMessages(t, setup); - t.end(); - }, 3000); - }); - - batch.test('multiple events in one email', (t) => { - const setup = setupLogging('multiple events in one email', { - recipients: 'recipient@domain.com', - sendInterval: 1, - SMTP: { - port: 25, - auth: { - user: 'user@domain.com' - } - } - }); - setTimeout(() => { - setup.logger.info('Log event #1'); - }, 0); - setTimeout(() => { - setup.logger.info('Log event #2'); - }, 100); - setTimeout(() => { - setup.logger.info('Log event #3'); - }, 1500); - setTimeout(() => { - t.equal(setup.results.length, 2, 'there should be two messages'); - t.equal(setup.results[0].to, 'recipient@domain.com'); 
- t.equal(setup.results[0].subject, 'Log event #1'); - t.equal( - setup.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length, - 2 - ); - t.equal(setup.results[1].to, 'recipient@domain.com'); - t.equal(setup.results[1].subject, 'Log event #3'); - t.ok(/.+Log event #3\n$/.test(setup.results[1].text)); - t.end(); - }, 3000); - }); - - batch.test('error when sending email', (t) => { - const setup = setupLogging('error when sending email', { - recipients: 'recipient@domain.com', - sendInterval: 0, - SMTP: { port: 25, auth: { user: 'user@domain.com' } } - }, 'oh noes'); - - setup.logger.info('This will break'); - - t.test('should be logged to console', (assert) => { - assert.equal(setup.console.errors.length, 1); - assert.equal(setup.console.errors[0].msg, 'log4js.smtpAppender - Error happened'); - assert.equal(setup.console.errors[0].value.message, 'oh noes'); - assert.end(); - }); - t.end(); - }); - - batch.test('transport full config', (t) => { - const setup = setupLogging('transport full config', { - recipients: 'recipient@domain.com', - transport: { - plugin: 'sendmail', - options: { - path: '/usr/sbin/sendmail' - } - } - }); - setup.logger.info('Log event #1'); - - t.equal(setup.results.length, 1, 'should be one message only'); - checkMessages(t, setup); - t.end(); - }); - - batch.test('transport no-options config', (t) => { - const setup = setupLogging('transport no-options config', { - recipients: 'recipient@domain.com', - transport: { - plugin: 'sendmail' - } - }); - setup.logger.info('Log event #1'); - - t.equal(setup.results.length, 1, 'should be one message only'); - checkMessages(t, setup); - t.end(); - }); - - batch.test('transport no-plugin config', (t) => { - const setup = setupLogging('transport no-plugin config', { - recipients: 'recipient@domain.com', - transport: {} - }); - setup.logger.info('Log event #1'); - - t.equal(setup.results.length, 1, 'should be one message only'); - checkMessages(t, setup); - t.end(); - }); - - batch.test('attachment config', (t) => { - const setup = setupLogging('attachment config', { - recipients: 'recipient@domain.com', - attachment: { - enable: true - }, - SMTP: { - port: 25, - auth: { - user: 'user@domain.com' - } - } - }); - setup.logger.info('Log event #1'); - - t.test('message should contain proper data', (assert) => { - assert.equal(setup.results.length, 1); - assert.equal(setup.results[0].attachments.length, 1); - const attachment = setup.results[0].attachments[0]; - assert.equal(setup.results[0].text, 'See logs as attachment'); - assert.equal(attachment.filename, 'default.log'); - assert.equal(attachment.contentType, 'text/x-log'); - assert.ok(new RegExp(`.+Log event #${1}\n$`).test(attachment.content)); - assert.end(); - }); - t.end(); - }); - - batch.end(); -}); From 18f5adbd77bea9b59a91710c3273526e5d6d4b46 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 31 May 2018 08:02:11 +1000 Subject: [PATCH 26/34] test: made multiprocess test a bit more reliable --- test/tap/multiprocess-shutdown-test.js | 19 +++++++++---------- test/tap/multiprocess-worker.js | 1 + 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/test/tap/multiprocess-shutdown-test.js b/test/tap/multiprocess-shutdown-test.js index fe18bb0..e4b014a 100644 --- a/test/tap/multiprocess-shutdown-test.js +++ b/test/tap/multiprocess-shutdown-test.js @@ -119,18 +119,17 @@ test('multiprocess appender crash (worker)', (t) => { categories: { default: { appenders: ['multi'], level: 'debug' } } }); - setTimeout(() => { - const worker = 
childProcess.fork( - require.resolve('./multiprocess-worker'), - ['start-multiprocess-worker', loggerPort] - ); - - setTimeout(() => { + const worker = childProcess.fork( + require.resolve('./multiprocess-worker'), + ['start-multiprocess-worker', loggerPort] + ); + worker.on('message', (m) => { + if (m === 'worker is done') { worker.kill(); setTimeout(() => { t.equal(messages[0], 'Logging from worker'); log4jsWithFakeConsole.shutdown(() => t.end()); - }, 250); - }, 250); - }, 250); + }, 500); + } + }); }); diff --git a/test/tap/multiprocess-worker.js b/test/tap/multiprocess-worker.js index c5b4b7c..a688b0a 100644 --- a/test/tap/multiprocess-worker.js +++ b/test/tap/multiprocess-worker.js @@ -8,4 +8,5 @@ if (process.argv.indexOf('start-multiprocess-worker') >= 0) { categories: { default: { appenders: ['multi'], level: 'debug' } } }); log4js.getLogger('worker').info('Logging from worker'); + process.send('worker is done'); } From 45516a91d141a500f2f1c9375de5ba3fd69d8b5e Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 7 Jun 2018 08:19:01 +1000 Subject: [PATCH 27/34] chore: remove redis appender from core --- lib/appenders/redis.js | 51 ----------- package-lock.json | 31 ------- package.json | 1 - test/tap/redisAppender-test.js | 152 --------------------------------- 4 files changed, 235 deletions(-) delete mode 100644 lib/appenders/redis.js delete mode 100644 test/tap/redisAppender-test.js diff --git a/lib/appenders/redis.js b/lib/appenders/redis.js deleted file mode 100644 index 93f1637..0000000 --- a/lib/appenders/redis.js +++ /dev/null @@ -1,51 +0,0 @@ -'use strict'; - -/** - * This appender has been deprecated, and will be removed in version 3. - * Any bug fixes or improvements should be made against @log4js-node/redis - */ -const redis = require('redis'); -const util = require('util'); - -function redisAppender(config, layout) { - const host = config.host || '127.0.0.1'; - const port = config.port || 6379; - const auth = config.pass ? { auth_pass: config.pass } : {}; - const redisClient = redis.createClient(port, host, auth); - - redisClient.on('error', (err) => { - if (err) { - console.error(`log4js.redisAppender - ${host}:${port} Error: ${util.inspect(err)}`); - } - }); - - const appender = function (loggingEvent) { - const message = layout(loggingEvent); - redisClient.publish(config.channel, message, (err) => { - if (err) { - console.error(`log4js.redisAppender - ${host}:${port} Error: ${util.inspect(err)}`); - } - }); - }; - - appender.shutdown = (cb) => { - redisClient.quit(); - if (cb) cb(); - }; - - // trigger a deprecation warning. 
- appender.deprecated = '@logj4s-node/redis'; - - return appender; -} - -function configure(config, layouts) { - let layout = layouts.messagePassThroughLayout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - - return redisAppender(config, layout); -} - -module.exports.configure = configure; diff --git a/package-lock.json b/package-lock.json index a9b3621..a4b0a25 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5691,37 +5691,6 @@ "strip-indent": "^2.0.0" } }, - "redis": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/redis/-/redis-2.8.0.tgz", - "integrity": "sha1-ICKI4/WMSfYHnZevehDhMDrhSwI=", - "optional": true, - "requires": { - "double-ended-queue": "^2.1.0-0", - "redis-commands": "^1.2.0", - "redis-parser": "^2.6.0" - }, - "dependencies": { - "double-ended-queue": { - "version": "2.1.0-0", - "resolved": "https://registry.npmjs.org/double-ended-queue/-/double-ended-queue-2.1.0-0.tgz", - "integrity": "sha1-ED01J/0xUo9AGIEwyEHv3XgmTlw=", - "optional": true - }, - "redis-commands": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.3.1.tgz", - "integrity": "sha1-gdgm9F+pyLIBH0zXoP5ZfSQdRCs=", - "optional": true - }, - "redis-parser": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-2.6.0.tgz", - "integrity": "sha1-Uu0J2srBCPGmMcB+m2mUHnoZUEs=", - "optional": true - } - } - }, "regexpp": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-1.1.0.tgz", diff --git a/package.json b/package.json index 0793fde..7f7ddfa 100644 --- a/package.json +++ b/package.json @@ -63,7 +63,6 @@ "validate-commit-msg": "^2.14.0" }, "optionalDependencies": { - "redis": "^2.7.1", "slack-node": "~0.2.0", "axios": "^0.15.3", "amqplib": "^0.5.2" diff --git a/test/tap/redisAppender-test.js b/test/tap/redisAppender-test.js deleted file mode 100644 index 8f9a0a0..0000000 --- a/test/tap/redisAppender-test.js +++ /dev/null @@ -1,152 +0,0 @@ -'use strict'; - -const test = require('tap').test; -const sandbox = require('@log4js-node/sandboxed-module'); -const appender = require('../../lib/appenders/redis'); - -function setupLogging(category, options) { - const fakeRedis = { - msgs: [], - createClient: function (port, host, optionR) { - this.port = port; - this.host = host; - this.optionR = optionR; - - return { - on: function (event, callback) { - fakeRedis.errorCb = callback; - }, - publish: function (channel, message, callback) { - fakeRedis.msgs.push({ channel: channel, message: message }); - fakeRedis.publishCb = callback; - }, - quit: function () { - fakeRedis.quitCalled = true; - }, - }; - } - }; - - const fakeConsole = { - log: () => {}, - errors: [], - error: function (msg) { - this.errors.push(msg); - } - }; - - const log4js = sandbox.require('../../lib/log4js', { - requires: { - redis: fakeRedis - }, - globals: { - console: fakeConsole - } - }); - log4js.configure({ - appenders: { redis: options }, - categories: { default: { appenders: ['redis'], level: 'trace' } } - }); - - return { - logger: log4js.getLogger(category), - log4js: log4js, - fakeRedis: fakeRedis, - fakeConsole: fakeConsole - }; -} - -test('log4js redisAppender', (batch) => { - batch.test('should export a configure function', (t) => { - t.type(appender.configure, 'function'); - t.end(); - }); - - batch.test('redis setup', (t) => { - const result = setupLogging('redis setup', { - host: '123.123.123.123', - port: 1234, - pass: '123456', - channel: 'log', - type: 
'redis', - layout: { - type: 'pattern', - pattern: 'cheese %m' - } - }); - - result.logger.info('Log event #1'); - result.fakeRedis.publishCb(); - - t.test('redis credentials should match', (assert) => { - assert.equal(result.fakeRedis.host, '123.123.123.123'); - assert.equal(result.fakeRedis.port, 1234); - assert.equal(result.fakeRedis.optionR.auth_pass, '123456'); - assert.equal(result.fakeRedis.msgs.length, 1, 'should be one message only'); - assert.equal(result.fakeRedis.msgs[0].channel, 'log'); - assert.equal(result.fakeRedis.msgs[0].message, 'cheese Log event #1'); - assert.end(); - }); - - t.end(); - }); - - batch.test('default values', (t) => { - const setup = setupLogging('defaults', { - type: 'redis', - channel: 'thing' - }); - - setup.logger.info('just testing'); - setup.fakeRedis.publishCb(); - - t.test('should use localhost', (assert) => { - assert.equal(setup.fakeRedis.host, '127.0.0.1'); - assert.equal(setup.fakeRedis.port, 6379); - assert.same(setup.fakeRedis.optionR, {}); - assert.end(); - }); - - t.test('should use message pass through layout', (assert) => { - assert.equal(setup.fakeRedis.msgs.length, 1); - assert.equal(setup.fakeRedis.msgs[0].channel, 'thing'); - assert.equal(setup.fakeRedis.msgs[0].message, 'just testing'); - assert.end(); - }); - - t.end(); - }); - - batch.test('redis errors', (t) => { - const setup = setupLogging('errors', { type: 'redis', channel: 'testing' }); - - setup.fakeRedis.errorCb('oh no, error on connect'); - setup.logger.info('something something'); - setup.fakeRedis.publishCb('oh no, error on publish'); - - t.test('should go to the console', (assert) => { - assert.equal(setup.fakeConsole.errors.length, 3); - assert.equal( - setup.fakeConsole.errors[1], - 'log4js.redisAppender - 127.0.0.1:6379 Error: \'oh no, error on connect\'' - ); - assert.equal( - setup.fakeConsole.errors[2], - 'log4js.redisAppender - 127.0.0.1:6379 Error: \'oh no, error on publish\'' - ); - assert.end(); - }); - t.end(); - }); - - batch.test('shutdown', (t) => { - const setup = setupLogging('shutdown', { type: 'redis', channel: 'testing' }); - - setup.log4js.shutdown(() => { - t.ok(setup.fakeRedis.quitCalled); - t.end(); - }); - }); - - batch.end(); -}); From d23f1af82bace78fb4ac70e2263337716c06113d Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 7 Jun 2018 08:22:04 +1000 Subject: [PATCH 28/34] chore: removed non-core appenders from types --- types/log4js.d.ts | 81 ----------------------------------------------- 1 file changed, 81 deletions(-) diff --git a/types/log4js.d.ts b/types/log4js.d.ts index 5cde881..f55561f 100644 --- a/types/log4js.d.ts +++ b/types/log4js.d.ts @@ -168,24 +168,6 @@ export interface DateFileAppender { daysToKeep?: number; } -export interface HipchatAppender { - type: 'hipchat'; - // User token with notification privileges - hipchat_token: string; - // Room ID or name - hipchat_room: string; - // (defaults to empty string) - a label to say where the message is from - hipchat_from?: string; - // (defaults to false) - make hipchat annoy people - hipchat_notify?: boolean; - // (defaults to api.hipchat.com) - set this if you have your own hipchat server - hipchat_host?: string; - // (defaults to only throwing errors) - implement this function if you want intercept the responses from hipchat - hipchat_response_callback?(err: Error, response: any): any; - // (defaults to messagePassThroughLayout) - layout?: Layout; -} - export interface LogFacesHTTPAppender { type: 'logFaces-HTTP'; // logFaces receiver servlet URL @@ -238,20 +220,6 @@ export 
interface MultiprocessAppender { loggerHost?: string; } -export interface RedisAppender { - type: 'redis'; - // (defaults to 127.0.0.1) - the location of the redis server - host?: string; - // (defaults to 6379) - the port the redis server is listening on - port?: number; - // password to use when authenticating connection to redis - pass?: string; - // the redis channel that log events will be published to - channel: string; - // (defaults to messagePassThroughLayout) - the layout to use for log events. - layout?: Layout; -} - export interface SlackAppender { type: 'slack'; // your Slack API token (see the slack and slack-node docs) @@ -270,52 +238,6 @@ export interface RecordingAppender { type: 'recording'; } -export interface SmtpAppender { - type: 'smtp'; - // (if not present will use transport field) - SMTP?: { - // (defaults to localhost) - host?: string; - // (defaults to 25) - port?: number; - // authentication details - auth?: { - user: string; - pass: string; - }; - }; - // (if not present will use SMTP) - see nodemailer docs for transport options - transport?: { - // (defaults to smtp) - the nodemailer transport plugin to use - plugin?: string; - // configuration for the transport plugin - options?: any; - } | string; - // send logs as email attachment - attachment?: { - // (defaults to false) - enable?: boolean; - // (defaults to See logs as attachment) - message to put in body of email - message: string; - // (defaults to default.log) - attachment filename - filename: string; - }; - // integer(defaults to 0) - batch emails and send in one email every sendInterval seconds, if 0 then every log message will send an email. - sendInterval?: number; - // (defaults to 5) - time in seconds to wait for emails to be sent during shutdown - shutdownTimeout?: number; - // email addresses to send the logs to - recipients: string; - // (defaults to message from first log event in batch) - subject for email - subject?: string; - // who the logs should be sent as - sender?: string; - // (defaults to false) - send the email as HTML instead of plain text - html?: boolean; - // (defaults to basicLayout) - layout?: Layout; -} - export interface StandardErrorAppender { type: 'stderr'; // (defaults to colouredLayout) @@ -338,16 +260,13 @@ export type Appender = CategoryFilterAppender | FileAppender | SyncfileAppender | DateFileAppender - | HipchatAppender | LogFacesHTTPAppender | LogFacesUDPAppender | LogLevelFilterAppender | MultiFileAppender | MultiprocessAppender - | RedisAppender | SlackAppender | RecordingAppender - | SmtpAppender | StandardErrorAppender | StandardOutputAppender | CustomAppender; From 4d054505a8ef2f9cf5c61186a6dc29484e6f7baf Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 15 Jun 2018 07:52:13 +1000 Subject: [PATCH 29/34] chore: removed slack appender --- docs/slack.md | 31 --- lib/appenders/slack.js | 47 ---- package-lock.json | 473 --------------------------------- package.json | 1 - test/tap/slackAppender-test.js | 163 ------------ types/log4js.d.ts | 15 -- 6 files changed, 730 deletions(-) delete mode 100644 docs/slack.md delete mode 100644 lib/appenders/slack.js delete mode 100644 test/tap/slackAppender-test.js diff --git a/docs/slack.md b/docs/slack.md deleted file mode 100644 index f1cfbf4..0000000 --- a/docs/slack.md +++ /dev/null @@ -1,31 +0,0 @@ -# Slack Appender - -Sends log events to a [slack](https://slack.com) channel. To use this appender you will need to include [slack-node](https://www.npmjs.com/package/slack-node) in your application's dependencies. 
- -## Configuration - -* `type` - `slack` -* `token` - `string` - your Slack API token (see the slack and slack-node docs) -* `channel_id` - `string` - the channel to send log messages -* `icon_url` - `string` (optional) - the icon to use for the message -* `username` - `string` - the username to display with the message -* `layout` - `object` (optional, defaults to `basicLayout`) - the layout to use for the message (see [layouts](layouts.md)). - -## Example - -```javascript -log4js.configure({ - appenders: { - alerts: { - type: 'slack', - token: 'abc123def', - channel_id: 'prod-alerts', - username: 'our_application' - } - }, - categories: { - default: { appenders: ['alerts'], level: 'error' } - } -}); -``` -This configuration will send all error (and above) messages to the `prod-alerts` slack channel, with the username `our_application`. diff --git a/lib/appenders/slack.js b/lib/appenders/slack.js deleted file mode 100644 index e348b93..0000000 --- a/lib/appenders/slack.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict'; - -/** - * This appender has been deprecated. - * Updates and bug fixes should be made against https://github.com/log4js-node/slack - */ -const Slack = require('slack-node'); - -function slackAppender(_config, layout, slack) { - const appender = (loggingEvent) => { - const data = { - channel_id: _config.channel_id, - text: layout(loggingEvent, _config.timezoneOffset), - icon_url: _config.icon_url, - username: _config.username - }; - - /* eslint no-unused-vars:0 */ - slack.api('chat.postMessage', { - channel: data.channel_id, - text: data.text, - icon_url: data.icon_url, - username: data.username - }, (err, response) => { - if (err) { - throw err; - } - }); - }; - - // trigger a deprecation warning. - appender.deprecated = '@logj4s-node/slack'; - return appender; -} - -function configure(_config, layouts) { - const slack = new Slack(_config.token); - - let layout = layouts.basicLayout; - if (_config.layout) { - layout = layouts.layout(_config.layout.type, _config.layout); - } - - return slackAppender(_config, layout, slack); -} - -module.exports.configure = configure; diff --git a/package-lock.json b/package-lock.json index a4b0a25..9da9e01 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5868,479 +5868,6 @@ "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", "dev": true }, - "slack-node": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/slack-node/-/slack-node-0.2.0.tgz", - "integrity": "sha1-3kuN3aqLeT9h29KTgQT9q/N9+jA=", - "optional": true, - "requires": { - "requestretry": "^1.2.2" - }, - "dependencies": { - "requestretry": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-1.12.2.tgz", - "integrity": "sha1-E844pM5OgJ88nsbUyjt7m6Ss8mw=", - "optional": true, - "requires": { - "extend": "^3.0.0", - "lodash": "^4.15.0", - "request": "^2.74.0", - "when": "^3.7.7" - }, - "dependencies": { - "extend": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz", - "integrity": "sha1-p1Xqe8Gt/MWjHOfnYtuq3F5jZEQ=" - }, - "lodash": { - "version": "4.17.4", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.4.tgz", - "integrity": "sha1-eCA6TRwyiuHYbcpkYONptX9AVa4=", - "optional": true - }, - "request": { - "version": "2.83.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.83.0.tgz", - "integrity": "sha1-ygtl2gLtYpNYh4COb1EDgQNOM1Y=", - "optional": true, - "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.6.0", - "caseless": "~0.12.0", - 
"combined-stream": "~1.0.5", - "extend": "~3.0.1", - "forever-agent": "~0.6.1", - "form-data": "~2.3.1", - "har-validator": "~5.0.3", - "hawk": "~6.0.2", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.17", - "oauth-sign": "~0.8.2", - "performance-now": "^2.1.0", - "qs": "~6.5.1", - "safe-buffer": "^5.1.1", - "stringstream": "~0.0.5", - "tough-cookie": "~2.3.3", - "tunnel-agent": "^0.6.0", - "uuid": "^3.1.0" - }, - "dependencies": { - "aws-sign2": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", - "optional": true - }, - "aws4": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz", - "integrity": "sha1-g+9cqGCysy5KDe7e6MdxudtXRx4=", - "optional": true - }, - "caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", - "optional": true - }, - "combined-stream": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", - "integrity": "sha1-k4NwpXtKUd6ix3wV1cX9+JUWQAk=", - "requires": { - "delayed-stream": "~1.0.0" - }, - "dependencies": { - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" - } - } - }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", - "optional": true - }, - "form-data": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.1.tgz", - "integrity": "sha1-b7lPvXGIUwbXPRXMSX/kzE7NRL8=", - "optional": true, - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.5", - "mime-types": "^2.1.12" - }, - "dependencies": { - "asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "optional": true - } - } - }, - "har-validator": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz", - "integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=", - "optional": true, - "requires": { - "ajv": "^5.1.0", - "har-schema": "^2.0.0" - }, - "dependencies": { - "ajv": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.3.0.tgz", - "integrity": "sha1-RBT/dKUIecII7l/cgm4ywwNUnto=", - "optional": true, - "requires": { - "co": "^4.6.0", - "fast-deep-equal": "^1.0.0", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.3.0" - }, - "dependencies": { - "co": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=", - "optional": true - }, - "fast-deep-equal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz", - "integrity": "sha1-liVqO8l1WV6zbYLpkp0GDYk0Of8=", - "optional": true - }, - "fast-json-stable-stringify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=", - "optional": true - }, - "json-schema-traverse": { - "version": "0.3.1", - "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", - "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=", - "optional": true - } - } - }, - "har-schema": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", - "optional": true - } - } - }, - "hawk": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz", - "integrity": "sha1-r02RTrBl+bXOTZ0RwcshJu7MMDg=", - "optional": true, - "requires": { - "boom": "4.x.x", - "cryptiles": "3.x.x", - "hoek": "4.x.x", - "sntp": "2.x.x" - }, - "dependencies": { - "boom": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz", - "integrity": "sha1-T4owBctKfjiJ90kDD9JbluAdLjE=", - "optional": true, - "requires": { - "hoek": "4.x.x" - } - }, - "cryptiles": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz", - "integrity": "sha1-qJ+7Ig9c4l7FboxKqKT9e1sNKf4=", - "optional": true, - "requires": { - "boom": "5.x.x" - }, - "dependencies": { - "boom": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz", - "integrity": "sha1-XdnabuOl8wIHdDYpDLcX0/SlTgI=", - "optional": true, - "requires": { - "hoek": "4.x.x" - } - } - } - }, - "hoek": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.0.tgz", - "integrity": "sha1-ctnQdU9/4lyi0BrY+PmpRJqJUm0=" - }, - "sntp": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/sntp/-/sntp-2.1.0.tgz", - "integrity": "sha1-LGzsFP7cIiJznK+bXD2F0cxaLMg=", - "optional": true, - "requires": { - "hoek": "4.x.x" - } - } - } - }, - "http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", - "optional": true, - "requires": { - "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" - }, - "dependencies": { - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" - }, - "jsprim": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", - "optional": true, - "requires": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.2.3", - "verror": "1.10.0" - }, - "dependencies": { - "extsprintf": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" - }, - "json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", - "optional": true - }, - "verror": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", - "optional": true, - "requires": { - "assert-plus": "^1.0.0", - "core-util-is": "1.0.2", - "extsprintf": "^1.2.0" - }, - "dependencies": { - "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", - "optional": true - } - } - } - } - }, - "sshpk": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.13.1.tgz", - "integrity": 
"sha1-US322mKHFEMW3EwY/hzx2UBzm+M=", - "optional": true, - "requires": { - "asn1": "~0.2.3", - "assert-plus": "^1.0.0", - "bcrypt-pbkdf": "^1.0.0", - "dashdash": "^1.12.0", - "ecc-jsbn": "~0.1.1", - "getpass": "^0.1.1", - "jsbn": "~0.1.0", - "tweetnacl": "~0.14.0" - }, - "dependencies": { - "asn1": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", - "integrity": "sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y=", - "optional": true - }, - "bcrypt-pbkdf": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz", - "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=", - "optional": true, - "requires": { - "tweetnacl": "^0.14.3" - } - }, - "dashdash": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", - "optional": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, - "ecc-jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", - "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=", - "optional": true, - "requires": { - "jsbn": "~0.1.0" - } - }, - "getpass": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", - "optional": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, - "jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "optional": true - }, - "tweetnacl": { - "version": "0.14.5", - "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "optional": true - } - } - } - } - }, - "is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", - "optional": true - }, - "isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", - "optional": true - }, - "json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", - "optional": true - }, - "mime-types": { - "version": "2.1.17", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.17.tgz", - "integrity": "sha1-Cdejk/A+mVp5+K+Fe3Cp4KsWVXo=", - "requires": { - "mime-db": "~1.30.0" - }, - "dependencies": { - "mime-db": { - "version": "1.30.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.30.0.tgz", - "integrity": "sha1-dMZD2i3Z1qRTmZY0ZbJtXKfXHwE=" - } - } - }, - "oauth-sign": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", - "integrity": "sha1-Rqarfwrq2N6unsBWV4C31O/rnUM=", - "optional": true - }, - "performance-now": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", - "optional": true - }, - "qs": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", - "integrity": "sha1-NJzfbu+J7EXBLX1es/wMhwNDptg=", - "optional": true - }, - "safe-buffer": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", - "integrity": 
"sha1-iTMSr2myEj3vcfV4iQAWce6yyFM=" - }, - "stringstream": { - "version": "0.0.5", - "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", - "integrity": "sha1-TkhM1N5aC7vuGORjB3EKioFiGHg=", - "optional": true - }, - "tough-cookie": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.3.tgz", - "integrity": "sha1-C2GKVWW23qkL80JdBNVe3EdadWE=", - "optional": true, - "requires": { - "punycode": "^1.4.1" - }, - "dependencies": { - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", - "optional": true - } - } - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "optional": true, - "requires": { - "safe-buffer": "^5.0.1" - } - }, - "uuid": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.1.0.tgz", - "integrity": "sha1-PdPT55Crwk17DToDT/q6vijrvAQ=", - "optional": true - } - } - }, - "when": { - "version": "3.7.8", - "resolved": "https://registry.npmjs.org/when/-/when-3.7.8.tgz", - "integrity": "sha1-xxMLan6gRpPoQs3J56Hyqjmjn4I=", - "optional": true - } - } - } - } - }, "slice-ansi": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-1.0.0.tgz", diff --git a/package.json b/package.json index 7f7ddfa..64769db 100644 --- a/package.json +++ b/package.json @@ -63,7 +63,6 @@ "validate-commit-msg": "^2.14.0" }, "optionalDependencies": { - "slack-node": "~0.2.0", "axios": "^0.15.3", "amqplib": "^0.5.2" }, diff --git a/test/tap/slackAppender-test.js b/test/tap/slackAppender-test.js deleted file mode 100644 index d2a643a..0000000 --- a/test/tap/slackAppender-test.js +++ /dev/null @@ -1,163 +0,0 @@ -'use strict'; - -const test = require('tap').test; -const sandbox = require('@log4js-node/sandboxed-module'); -const realLayouts = require('../../lib/layouts'); - -function setupLogging(category, options) { - const msgs = []; - - const slackCredentials = { - token: options.token, - channel_id: options.channel_id, - username: options.username, - format: options.format, - icon_url: options.icon_url - }; - const fakeSlack = (function (key) { - function constructor() { - return { - options: key, - api: function (action, data, callback) { - msgs.push(data); - callback(false, { status: 'sent' }); - } - }; - } - - return constructor(key); - }); - - const fakeLayouts = { - layout: function (type, config) { - this.type = type; - this.config = config; - return realLayouts.messagePassThroughLayout; - }, - basicLayout: realLayouts.basicLayout, - coloredLayout: realLayouts.coloredLayout, - messagePassThroughLayout: realLayouts.messagePassThroughLayout - }; - - const fakeConsole = { - errors: [], - logs: [], - error: function (msg, value) { - this.errors.push({ msg: msg, value: value }); - }, - log: function (msg, value) { - this.logs.push({ msg: msg, value: value }); - } - }; - - const log4js = sandbox.require('../../lib/log4js', { - requires: { - 'slack-node': fakeSlack, - './layouts': fakeLayouts - }, - globals: { - console: fakeConsole - } - }); - - options.type = 'slack'; - log4js.configure({ - appenders: { - slack: options - }, - categories: { - default: { appenders: ['slack'], level: 'trace' } - } - }); - - return { - logger: log4js.getLogger(category), - mailer: fakeSlack, - layouts: fakeLayouts, - console: fakeConsole, - messages: msgs, - 
credentials: slackCredentials - }; -} - -function checkMessages(assert, result) { - for (let i = 0; i < result.messages.length; ++i) { - assert.equal(result.messages[i].channel, '#CHANNEL'); - assert.equal(result.messages[i].username, 'USERNAME'); - assert.ok(new RegExp(`.+Log event #${i + 1}`).test(result.messages[i].text)); - } -} - -test('log4js slackAppender', (batch) => { - batch.test('slack setup', (t) => { - const result = setupLogging('slack setup', { - token: 'TOKEN', - channel_id: '#CHANNEL', - username: 'USERNAME', - format: 'FORMAT', - icon_url: 'ICON_URL' - }); - - t.test('slack credentials should match', (assert) => { - assert.equal(result.credentials.token, 'TOKEN'); - assert.equal(result.credentials.channel_id, '#CHANNEL'); - assert.equal(result.credentials.username, 'USERNAME'); - assert.equal(result.credentials.format, 'FORMAT'); - assert.equal(result.credentials.icon_url, 'ICON_URL'); - assert.end(); - }); - t.end(); - }); - - batch.test('basic usage', (t) => { - const setup = setupLogging('basic usage', { - token: 'TOKEN', - channel_id: '#CHANNEL', - username: 'USERNAME', - format: 'FORMAT', - icon_url: 'ICON_URL', - }); - - setup.logger.info('Log event #1'); - - t.equal(setup.messages.length, 1, 'should be one message only'); - checkMessages(t, setup); - t.end(); - }); - - batch.test('config with layout', (t) => { - const result = setupLogging('config with layout', { - layout: { - type: 'tester' - } - }); - t.equal(result.layouts.type, 'tester', 'should configure layout'); - t.end(); - }); - - batch.test('separate notification for each event', (t) => { - const setup = setupLogging('separate notification for each event', { - token: 'TOKEN', - channel_id: '#CHANNEL', - username: 'USERNAME', - format: 'FORMAT', - icon_url: 'ICON_URL', - }); - setTimeout(() => { - setup.logger.info('Log event #1'); - }, 0); - setTimeout(() => { - setup.logger.info('Log event #2'); - }, 500); - setTimeout(() => { - setup.logger.info('Log event #3'); - }, 1100); - setTimeout(() => { - t.equal(setup.messages.length, 3, 'should be three messages'); - checkMessages(t, setup); - t.end(); - }, 3000); - }); - - batch.end(); -}); diff --git a/types/log4js.d.ts b/types/log4js.d.ts index f55561f..1a582ff 100644 --- a/types/log4js.d.ts +++ b/types/log4js.d.ts @@ -220,20 +220,6 @@ export interface MultiprocessAppender { loggerHost?: string; } -export interface SlackAppender { - type: 'slack'; - // your Slack API token (see the slack and slack-node docs) - token: string; - // the channel to send log messages - channel_id: string; - // the icon to use for the message - icon_url?: string; - // the username to display with the message - username: string; - // (defaults to basicLayout) - the layout to use for the message. 
- layout?: Layout; -} - export interface RecordingAppender { type: 'recording'; } @@ -265,7 +251,6 @@ export type Appender = CategoryFilterAppender | LogLevelFilterAppender | MultiFileAppender | MultiprocessAppender - | SlackAppender | RecordingAppender | StandardErrorAppender | StandardOutputAppender From a7349e24a54ba0cac8291ae62b70cbf31f5b0eb7 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Tue, 26 Jun 2018 07:45:42 +1000 Subject: [PATCH 30/34] chore: remove rabbitmq appender --- lib/appenders/rabbitmq.js | 67 ---------------- package-lock.json | 54 +------------ package.json | 3 +- test/tap/rabbitmqAppender-test.js | 126 ------------------------------ 4 files changed, 3 insertions(+), 247 deletions(-) delete mode 100644 lib/appenders/rabbitmq.js delete mode 100644 test/tap/rabbitmqAppender-test.js diff --git a/lib/appenders/rabbitmq.js b/lib/appenders/rabbitmq.js deleted file mode 100644 index e5de5c6..0000000 --- a/lib/appenders/rabbitmq.js +++ /dev/null @@ -1,67 +0,0 @@ -'use strict'; - -/** - * This appender is deprecated and will be removed in version 3.x - * Please make any bug fixes or improvements to https://github.com/log4js-node/rabbitmq - */ -const amqplib = require('amqplib'); - -function rabbitmqAppender(config, layout) { - const host = config.host || '127.0.0.1'; - const port = config.port || 5672; - const username = config.username || 'guest'; - const password = config.password || 'guest'; - const exchange = config.exchange || ''; - const type = config.mq_type || ''; - const durable = config.durable || false; - const routingKey = config.routing_key || 'logstash'; - const con = { - protocol: 'amqp', - hostname: host, - port: port, - username: username, - password: password, - locale: 'en_US', - frameMax: 0, - heartbeat: 0, - vhost: '/', - routing_key: routingKey, - exchange: exchange, - mq_type: type, - durable: durable, - }; - const clientconn = amqplib.connect(con); - clientconn.publish = amqplib.connect(con).publish ? 
amqplib.connect(con).publish : (client, message) => { - client.then((conn) => { - const rn = conn.createChannel().then((ch) => { - const ok = ch.assertExchange(exchange, type, { durable: durable }); - return ok.then(() => { - ch.publish(exchange, routingKey, Buffer.from(message)); - return ch.close(); - }); - }); - return rn; - }).catch(console.error); - }; - function log(loggingEvent) { - const message = layout(loggingEvent); - clientconn.publish(clientconn, message); - } - log.shutdown = function () { - clientconn.close(); - }; - - log.deprecated = '@log4js-node/rabbitmq'; - return log; -} - -function configure(config, layouts) { - let layout = layouts.messagePassThroughLayout; - if (config.layout) { - layout = layouts.layout(config.layout.type, config.layout); - } - - return rabbitmqAppender(config, layout); -} - -module.exports.configure = configure; diff --git a/package-lock.json b/package-lock.json index 2be272c..e9334d2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -82,19 +82,6 @@ "integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=", "dev": true }, - "amqplib": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/amqplib/-/amqplib-0.5.2.tgz", - "integrity": "sha512-l9mCs6LbydtHqRniRwYkKdqxVa6XMz3Vw1fh+2gJaaVgTM6Jk3o8RccAKWKtlhT1US5sWrFh+KKxsVUALURSIA==", - "optional": true, - "requires": { - "bitsyntax": "~0.0.4", - "bluebird": "^3.4.6", - "buffer-more-ints": "0.0.2", - "readable-stream": "1.x >=1.1.9", - "safe-buffer": "^5.0.1" - } - }, "ansi-escapes": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.1.0.tgz", @@ -233,19 +220,11 @@ "integrity": "sha512-3/qRXczDi2Cdbz6jE+W3IflJOutRVica8frpBn14de1mBOkzDo+6tY33kNhvkw54Kn3PzRRD2VnGbGPcTAk4sw==", "dev": true }, - "bitsyntax": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/bitsyntax/-/bitsyntax-0.0.4.tgz", - "integrity": "sha1-6xDMb4K4xJDj6FaY8H6D1G4MuoI=", - "optional": true, - "requires": { - "buffer-more-ints": "0.0.2" - } - }, "bluebird": { "version": "3.5.1", "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.1.tgz", - "integrity": "sha512-MKiLiV+I1AA596t9w1sQJ8jkiSr5+ZKi0WKrYGUn6d1Fx+Ij4tIj+m2WMQSGczs5jZVxV339chE8iwk6F64wjA==" + "integrity": "sha512-MKiLiV+I1AA596t9w1sQJ8jkiSr5+ZKi0WKrYGUn6d1Fx+Ij4tIj+m2WMQSGczs5jZVxV339chE8iwk6F64wjA==", + "dev": true }, "brace-expansion": { "version": "1.1.11", @@ -263,11 +242,6 @@ "integrity": "sha512-c5mRlguI/Pe2dSZmpER62rSCu0ryKmWddzRYsuXc50U2/g8jMOulc31VZMa4mYx31U5xsmSOpDCgH88Vl9cDGQ==", "dev": true }, - "buffer-more-ints": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/buffer-more-ints/-/buffer-more-ints-0.0.2.tgz", - "integrity": "sha1-JrOIXRD6E9t/wBquOquHAZngEkw=" - }, "builtin-modules": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", @@ -1954,12 +1928,6 @@ "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=", "dev": true }, - "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", - "optional": true - }, "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -5659,18 +5627,6 @@ } } }, - "readable-stream": { - "version": "1.1.14", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", - "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", - "optional": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "0.0.1", 
- "string_decoder": "~0.10.x" - } - }, "redent": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/redent/-/redent-2.0.0.tgz", @@ -6039,12 +5995,6 @@ } } }, - "string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=", - "optional": true - }, "strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", diff --git a/package.json b/package.json index d5d7216..4bde270 100644 --- a/package.json +++ b/package.json @@ -63,8 +63,7 @@ "validate-commit-msg": "^2.14.0" }, "optionalDependencies": { - "axios": "^0.15.3", - "amqplib": "^0.5.2" + "axios": "^0.15.3" }, "browser": { "os": false diff --git a/test/tap/rabbitmqAppender-test.js b/test/tap/rabbitmqAppender-test.js deleted file mode 100644 index 721cd98..0000000 --- a/test/tap/rabbitmqAppender-test.js +++ /dev/null @@ -1,126 +0,0 @@ -'use strict'; - -const test = require('tap').test; -const sandbox = require('@log4js-node/sandboxed-module'); -const appender = require('../../lib/appenders/rabbitmq'); - -function setupLogging(category, options) { - const fakeRabbitmq = { - msgs: [], - connect: function (conn) { - this.port = conn.port; - this.host = conn.hostname; - this.username = conn.username; - this.password = conn.password; - this.routing_key = conn.routing_key; - this.exchange = conn.exchange; - this.mq_type = conn.mq_type; - this.durable = conn.durable; - return { - publish: function (client, message) { - fakeRabbitmq.msgs.push(message); - } - }; - } - }; - - const fakeConsole = { - log: () => {}, - errors: [], - error: function (msg) { - this.errors.push(msg); - } - }; - - const log4js = sandbox.require('../../lib/log4js', { - requires: { - amqplib: fakeRabbitmq, - }, - globals: { - console: fakeConsole - } - }); - log4js.configure({ - appenders: { rabbitmq: options }, - categories: { default: { appenders: ['rabbitmq'], level: 'trace' } } - }); - - return { - logger: log4js.getLogger(category), - fakeRabbitmq: fakeRabbitmq, - fakeConsole: fakeConsole - }; -} - -test('log4js rabbitmqAppender', (batch) => { - batch.test('should export a configure function', (t) => { - t.type(appender.configure, 'function'); - t.end(); - }); - - batch.test('rabbitmq setup', (t) => { - const result = setupLogging('rabbitmq setup', { - host: '123.123.123.123', - port: 5672, - username: 'guest', - password: 'guest', - routing_key: 'logstash', - exchange: 'exchange_logs', - mq_type: 'direct', - durable: true, - type: 'rabbitmq', - layout: { - type: 'pattern', - pattern: 'cheese %m' - } - }); - - result.logger.info('Log event #1'); - - t.test('rabbitmq credentials should match', (assert) => { - assert.equal(result.fakeRabbitmq.host, '123.123.123.123'); - assert.equal(result.fakeRabbitmq.port, 5672); - assert.equal(result.fakeRabbitmq.username, 'guest'); - assert.equal(result.fakeRabbitmq.password, 'guest'); - assert.equal(result.fakeRabbitmq.routing_key, 'logstash'); - assert.equal(result.fakeRabbitmq.exchange, 'exchange_logs'); - assert.equal(result.fakeRabbitmq.mq_type, 'direct'); - assert.equal(result.fakeRabbitmq.durable, true); - assert.equal(result.fakeRabbitmq.msgs.length, 1, 'should be one message only'); - assert.equal(result.fakeRabbitmq.msgs[0], 'cheese Log event #1'); - assert.end(); - }); - - t.end(); - }); - - batch.test('default values', (t) => { - const setup = setupLogging('defaults', { - type: 'rabbitmq' - }); - - setup.logger.info('just testing'); - - 
t.test('should use localhost', (assert) => { - assert.equal(setup.fakeRabbitmq.host, '127.0.0.1'); - assert.equal(setup.fakeRabbitmq.port, 5672); - assert.equal(setup.fakeRabbitmq.username, 'guest'); - assert.equal(setup.fakeRabbitmq.password, 'guest'); - assert.equal(setup.fakeRabbitmq.exchange, ''); - assert.equal(setup.fakeRabbitmq.mq_type, ''); - assert.equal(setup.fakeRabbitmq.durable, false); - assert.equal(setup.fakeRabbitmq.routing_key, 'logstash'); - assert.end(); - }); - - t.test('should use message pass through layout', (assert) => { - assert.equal(setup.fakeRabbitmq.msgs.length, 1); - assert.equal(setup.fakeRabbitmq.msgs[0], 'just testing'); - assert.end(); - }); - - t.end(); - }); - - batch.end(); -}); From 16603ca4ccd39e48d16aae20c28fe581b0eb231d Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 2 Jul 2018 08:11:47 +1000 Subject: [PATCH 31/34] chore: removed logstash-http appender --- lib/appenders/logstashHTTP.js | 97 ---------------------------- test/tap/logstashHTTP-test.js | 115 ---------------------------------- 2 files changed, 212 deletions(-) delete mode 100644 lib/appenders/logstashHTTP.js delete mode 100644 test/tap/logstashHTTP-test.js diff --git a/lib/appenders/logstashHTTP.js b/lib/appenders/logstashHTTP.js deleted file mode 100644 index b04862c..0000000 --- a/lib/appenders/logstashHTTP.js +++ /dev/null @@ -1,97 +0,0 @@ -/** - * This appender is deprecated, please apply any bugfixes or changes - * to https://github.com/log4js-node/logstash-http - * logstashHTTP appender sends JSON formatted log events to logstashHTTP receivers. - * - * HTTP require 'axios', see 'https://www.npmjs.com/package/axios' - * - * Make sure your project have relevant dependancy installed before using this appender. - */ -/* eslint global-require:0 */ - -'use strict'; - -const util = require('util'); -const axios = require('axios'); - -/** - * - * For HTTP (browsers or node.js) use the following configuration params: - * { - * "type": "logstashHTTP", // must be present for instantiation - * "application": "logstash-test", // name of the application - * "logType": "application", // type of the application - * "logChannel": "test", // channel of the application - * "url": "http://lfs-server/_bulk", // logstash receiver servlet URL - * } - */ -function logstashHTTPAppender(config) { - const sender = axios.create({ - baseURL: config.url, - timeout: config.timeout || 5000, - headers: { 'Content-Type': 'application/x-ndjson' }, - withCredentials: true, - }); - - const appender = function log(event) { - const logstashEvent = [ - { - index: { - _index: config.application, - _type: config.logType, - }, - }, - { - message: format(event.data), // eslint-disable-line - context: event.context, - level: event.level.level / 100, - level_name: event.level.levelStr, - channel: config.logChannel, - datetime: (new Date(event.startTime)).toISOString(), - extra: {}, - }, - ]; - const logstashJSON = `${JSON.stringify(logstashEvent[0])}\n${JSON.stringify(logstashEvent[1])}\n`; - - // send to server - sender.post('', logstashJSON) - .catch((error) => { - if (error.response) { - console.error(`log4js.logstashHTTP Appender error posting to ${config.url}: ${error.response.status} - ${error.response.data}`); - return; - } - console.error(`log4js.logstashHTTP Appender error: ${error.message}`); - }); - }; - - appender.deprecated = '@log4js-node/logstash-http'; - - return appender; -} - -function configure(config) { - return logstashHTTPAppender(config); -} - -function format(logData) { - const data = 
Array.isArray(logData) - ? logData - : Array.prototype.slice.call(arguments); - return util.format.apply(util, wrapErrorsWithInspect(data)); -} - -function wrapErrorsWithInspect(items) { - return items.map((item) => { - if ((item instanceof Error) && item.stack) { - return { - inspect: function () { - return `${util.format(item)}\n${item.stack}`; - } - }; - } - - return item; - }); -} - -module.exports.configure = configure; diff --git a/test/tap/logstashHTTP-test.js b/test/tap/logstashHTTP-test.js deleted file mode 100644 index fde2ecd..0000000 --- a/test/tap/logstashHTTP-test.js +++ /dev/null @@ -1,115 +0,0 @@ -'use strict'; - -const test = require('tap').test; -const sandbox = require('@log4js-node/sandboxed-module'); -const appender = require('../../lib/appenders/logstashHTTP'); - -function setupLogging(category, options) { - const fakeAxios = { - create: function (config) { - this.config = config; - return { - post: function (emptyString, event) { - fakeAxios.args = [emptyString, event]; - return { - catch: function (cb) { - fakeAxios.errorCb = cb; - } - }; - } - }; - } - }; - - const fakeConsole = { - log: () => {}, - error: function (msg) { - this.msg = msg; - } - }; - - const log4js = sandbox.require('../../lib/log4js', { - requires: { - axios: fakeAxios - }, - globals: { - console: fakeConsole - } - }); - - options.type = 'logstashHTTP'; - log4js.configure({ - appenders: { http: options }, - categories: { default: { appenders: ['http'], level: 'trace' } } - }); - - return { - logger: log4js.getLogger(category), - fakeAxios: fakeAxios, - fakeConsole: fakeConsole - }; -} - -test('logstashappender', (batch) => { - batch.test('should export a configure function', (t) => { - t.type(appender.configure, 'function'); - t.end(); - }); - - batch.test('when using HTTP receivers', (t) => { - const setup = setupLogging('myCategory', { - application: 'logstash-sample', - logType: 'application', - logChannel: 'sample', - url: 'http://localhost/receivers/rx1' - }); - - t.test('axios should be configured', (assert) => { - assert.equal(setup.fakeAxios.config.baseURL, 'http://localhost/receivers/rx1'); - assert.equal(setup.fakeAxios.config.timeout, 5000); - assert.equal(setup.fakeAxios.config.withCredentials, true); - assert.same(setup.fakeAxios.config.headers, { 'Content-Type': 'application/x-ndjson' }); - assert.end(); - }); - - setup.logger.addContext('foo', 'bar'); - setup.logger.addContext('bar', 'foo'); - setup.logger.warn('Log event #1'); - - t.test('an event should be sent', (assert) => { - const packet = setup.fakeAxios.args[1].split('\n'); - const eventHeader = JSON.parse(packet[0]); - const eventBody = JSON.parse(packet[1]); - assert.equal(eventHeader.index._index, 'logstash-sample'); - assert.equal(eventHeader.index._type, 'application'); - - assert.equal(eventBody.channel, 'sample'); - assert.equal(eventBody.message, 'Log event #1'); - assert.equal(eventBody.level_name, 'WARN'); - assert.equal(eventBody.context.foo, 'bar'); - assert.equal(eventBody.context.bar, 'foo'); - - // Assert timestamp, up to hours resolution. 
- const date = new Date(eventBody.datetime); - assert.equal( - date.toISOString().substring(0, 14), - new Date().toISOString().substring(0, 14) - ); - assert.end(); - }); - - t.test('errors should be sent to console.error', (assert) => { - setup.fakeAxios.errorCb({ response: { status: 500, data: 'oh no' } }); - assert.equal( - setup.fakeConsole.msg, - 'log4js.logstashHTTP Appender error posting to http://localhost/receivers/rx1: 500 - oh no' - ); - setup.fakeAxios.errorCb(new Error('oh dear')); - assert.equal(setup.fakeConsole.msg, 'log4js.logstashHTTP Appender error: oh dear'); - assert.end(); - }); - t.end(); - }); - - batch.end(); -}); From 06c56b4b072cf1838b86c5ffd1db93464120765d Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 6 Jul 2018 08:10:06 +1000 Subject: [PATCH 32/34] chore: turned off unused locals complaint for types (caused by levels) --- types/tsconfig.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/types/tsconfig.json b/types/tsconfig.json index b9eb2d2..35b9d6f 100644 --- a/types/tsconfig.json +++ b/types/tsconfig.json @@ -3,7 +3,7 @@ "compilerOptions": { "strict": true, "noUnusedParameters": true, - "noUnusedLocals": true, + "noUnusedLocals": false, "noEmit": true } } From 3a565667f984cdaf52b2c88cd38cb8d92c42d7f5 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 6 Jul 2018 08:18:24 +1000 Subject: [PATCH 33/34] chore: removed logFaces-HTTP appender --- lib/appenders/logFaces-HTTP.js | 94 ---------------------------- package-lock.json | 29 --------- package.json | 3 - test/tap/logFaces-HTTP-test.js | 109 --------------------------------- types/log4js.d.ts | 11 ---- 5 files changed, 246 deletions(-) delete mode 100644 lib/appenders/logFaces-HTTP.js delete mode 100644 test/tap/logFaces-HTTP-test.js diff --git a/lib/appenders/logFaces-HTTP.js b/lib/appenders/logFaces-HTTP.js deleted file mode 100644 index 397a02c..0000000 --- a/lib/appenders/logFaces-HTTP.js +++ /dev/null @@ -1,94 +0,0 @@ -/** - * This appender is deprecated. All bugfixes and improvements should be made in - * https://github.com/log4js-node/logFaces-HTTP - * - * logFaces appender sends JSON formatted log events to logFaces receivers. - * There are two types of receivers supported - raw UDP sockets (for server side apps), - * and HTTP (for client side apps). Depending on the usage, this appender - * requires either of the two: - * - * For UDP require 'dgram', see 'https://nodejs.org/api/dgram.html' - * For HTTP require 'axios', see 'https://www.npmjs.com/package/axios' - * - * Make sure your project have relevant dependancy installed before using this appender. 
- */ -/* eslint global-require:0 */ - -'use strict'; - -const util = require('util'); -const axios = require('axios'); - -/** - * - * For HTTP (browsers or node.js) use the following configuration params: - * { - * "type": "logFaces-HTTP", // must be present for instantiation - * "application": "LFS-TEST", // name of the application (domain) - * "url": "http://lfs-server/logs", // logFaces receiver servlet URL - * } - */ -function logFacesAppender(config) { - const sender = axios.create({ - baseURL: config.url, - timeout: config.timeout || 5000, - headers: { 'Content-Type': 'application/json' }, - withCredentials: true - }); - - const appender = function log(event) { - // convert to logFaces compact json format - const lfsEvent = { - a: config.application || '', // application name - t: event.startTime.getTime(), // time stamp - p: event.level.levelStr, // level (priority) - g: event.categoryName, // logger name - m: format(event.data) // message text - }; - - // add context variables if exist - Object.keys(event.context).forEach((key) => { - lfsEvent[`p_${key}`] = event.context[key]; - }); - - // send to server - sender.post('', lfsEvent) - .catch((error) => { - if (error.response) { - console.error(`log4js.logFaces-HTTP Appender error posting to ${config.url}: ${error.response.status} - ${error.response.data}`); - return; - } - console.error(`log4js.logFaces-HTTP Appender error: ${error.message}`); - }); - }; - - appender.deprecated = '@log4js-node/logfaces-http'; - return appender; -} - -function configure(config) { - return logFacesAppender(config); -} - -function format(logData) { - const data = Array.isArray(logData) - ? logData - : Array.prototype.slice.call(arguments); - return util.format.apply(util, wrapErrorsWithInspect(data)); -} - -function wrapErrorsWithInspect(items) { - return items.map((item) => { - if ((item instanceof Error) && item.stack) { - return { - inspect: function () { - return `${util.format(item)}\n${item.stack}`; - } - }; - } - - return item; - }); -} - -module.exports.configure = configure; diff --git a/package-lock.json b/package-lock.json index e9334d2..725572d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -178,15 +178,6 @@ "integrity": "sha512-32NDda82rhwD9/JBCCkB+MRYDp0oSvlo2IL6rQWA10PQi7tDUM3eqMSltXmY+Oyl/7N3P3qNtAlv7X0d9bI28w==", "dev": true }, - "axios": { - "version": "0.15.3", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.15.3.tgz", - "integrity": "sha1-LJ1jiy4ZGgjqHWzJiOrda6W9wFM=", - "optional": true, - "requires": { - "follow-redirects": "1.0.0" - } - }, "babel-code-frame": { "version": "6.26.0", "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz", @@ -1298,26 +1289,6 @@ } } }, - "follow-redirects": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.0.0.tgz", - "integrity": "sha1-jjQpjL0uF28lTv/sdaHHjMhJ/Tc=", - "optional": true, - "requires": { - "debug": "^2.2.0" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "optional": true, - "requires": { - "ms": "2.0.0" - } - } - } - }, "foreground-child": { "version": "1.5.6", "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-1.5.6.tgz", diff --git a/package.json b/package.json index 4bde270..6130d42 100644 --- a/package.json +++ b/package.json @@ -62,9 +62,6 @@ "typescript": 
"^2.8.3", "validate-commit-msg": "^2.14.0" }, - "optionalDependencies": { - "axios": "^0.15.3" - }, "browser": { "os": false }, diff --git a/test/tap/logFaces-HTTP-test.js b/test/tap/logFaces-HTTP-test.js deleted file mode 100644 index 847d81c..0000000 --- a/test/tap/logFaces-HTTP-test.js +++ /dev/null @@ -1,109 +0,0 @@ -'use strict'; - -const test = require('tap').test; -const sandbox = require('@log4js-node/sandboxed-module'); -const appender = require('../../lib/appenders/logFaces-HTTP'); - -function setupLogging(category, options) { - const fakeAxios = { - create: function (config) { - this.config = config; - return { - post: function (emptyString, event) { - fakeAxios.args = [emptyString, event]; - return { - catch: function (cb) { - fakeAxios.errorCb = cb; - } - }; - } - }; - } - }; - - const fakeConsole = { - log: () => {}, - error: function (msg) { - this.msg = msg; - } - }; - - const log4js = sandbox.require('../../lib/log4js', { - requires: { - axios: fakeAxios - }, - globals: { - console: fakeConsole - } - }); - - options.type = 'logFaces-HTTP'; - log4js.configure({ - appenders: { http: options }, - categories: { default: { appenders: ['http'], level: 'trace' } } - }); - - return { - logger: log4js.getLogger(category), - fakeAxios: fakeAxios, - fakeConsole: fakeConsole - }; -} - -test('logFaces appender', (batch) => { - batch.test('should export a configure function', (t) => { - t.type(appender.configure, 'function'); - t.end(); - }); - - batch.test('when using HTTP receivers', (t) => { - const setup = setupLogging('myCategory', { - application: 'LFS-HTTP', - url: 'http://localhost/receivers/rx1' - }); - - t.test('axios should be configured', (assert) => { - assert.equal(setup.fakeAxios.config.baseURL, 'http://localhost/receivers/rx1'); - assert.equal(setup.fakeAxios.config.timeout, 5000); - assert.equal(setup.fakeAxios.config.withCredentials, true); - assert.same(setup.fakeAxios.config.headers, { 'Content-Type': 'application/json' }); - assert.end(); - }); - - setup.logger.addContext('foo', 'bar'); - setup.logger.addContext('bar', 'foo'); - setup.logger.warn('Log event #1'); - - t.test('an event should be sent', (assert) => { - const event = setup.fakeAxios.args[1]; - assert.equal(event.a, 'LFS-HTTP'); - assert.equal(event.m, 'Log event #1'); - assert.equal(event.g, 'myCategory'); - assert.equal(event.p, 'WARN'); - assert.equal(event.p_foo, 'bar'); - assert.equal(event.p_bar, 'foo'); - - // Assert timestamp, up to hours resolution. 
- const date = new Date(event.t); - assert.equal( - date.toISOString().substring(0, 14), - new Date().toISOString().substring(0, 14) - ); - assert.end(); - }); - - t.test('errors should be sent to console.error', (assert) => { - setup.fakeAxios.errorCb({ response: { status: 500, data: 'oh no' } }); - assert.equal( - setup.fakeConsole.msg, - 'log4js.logFaces-HTTP Appender error posting to http://localhost/receivers/rx1: 500 - oh no' - ); - setup.fakeAxios.errorCb(new Error('oh dear')); - assert.equal(setup.fakeConsole.msg, 'log4js.logFaces-HTTP Appender error: oh dear'); - assert.end(); - }); - t.end(); - }); - - batch.end(); -}); diff --git a/types/log4js.d.ts b/types/log4js.d.ts index d6bd4f9..54d02c3 100644 --- a/types/log4js.d.ts +++ b/types/log4js.d.ts @@ -168,16 +168,6 @@ export interface DateFileAppender { daysToKeep?: number; } -export interface LogFacesHTTPAppender { - type: 'logFaces-HTTP'; - // logFaces receiver servlet URL - url: string; - // (defaults to empty string) - used to identify your application’s logs - application?: string; - // (defaults to 5000ms) - the timeout for the HTTP request. - timeout?: number; -} - export interface LogFacesUDPAppender { type: 'logFaces-UDP'; // (defaults to ‘127.0.0.1’)- hostname or IP address of the logFaces receiver @@ -246,7 +236,6 @@ export type Appender = CategoryFilterAppender | FileAppender | SyncfileAppender | DateFileAppender - | LogFacesHTTPAppender | LogFacesUDPAppender | LogLevelFilterAppender | MultiFileAppender From 467f67094a38e0e4aac2742d33b2e32c0a56a387 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 12 Jul 2018 19:58:20 +1000 Subject: [PATCH 34/34] chore: removed the logFaces-UDP appender --- lib/appenders/logFaces-UDP.js | 97 -------------------------------- test/tap/logFaces-UDP-test.js | 101 ---------------------------------- types/log4js.d.ts | 11 ---- 3 files changed, 209 deletions(-) delete mode 100644 lib/appenders/logFaces-UDP.js delete mode 100644 test/tap/logFaces-UDP-test.js diff --git a/lib/appenders/logFaces-UDP.js b/lib/appenders/logFaces-UDP.js deleted file mode 100644 index edafe2e..0000000 --- a/lib/appenders/logFaces-UDP.js +++ /dev/null @@ -1,97 +0,0 @@ -/** - * This appender has been deprecated. Any bug fixes or improvements should be - * made against https://github.com/log4js-node/logFaces-UDP - * - * logFaces appender sends JSON formatted log events to logFaces receivers. - * There are two types of receivers supported - raw UDP sockets (for server side apps), - * and HTTP (for client side apps). Depending on the usage, this appender - * requires either of the two: - * - * For UDP require 'dgram', see 'https://nodejs.org/api/dgram.html' - * For HTTP require 'axios', see 'https://www.npmjs.com/package/axios' - * - * Make sure your project have relevant dependancy installed before using this appender. 
- */ - -'use strict'; - -const util = require('util'); -const dgram = require('dgram'); - -function datagram(config) { - const sock = dgram.createSocket('udp4'); - const host = config.remoteHost || '127.0.0.1'; - const port = config.port || 55201; - - return function (event) { - const buff = Buffer.from(JSON.stringify(event)); - sock.send(buff, 0, buff.length, port, host, (err) => { - if (err) { - console.error(`log4js.logFacesUDPAppender error sending to ${host}:${port}, error: `, err); - } - }); - }; -} - -/** - * For UDP (node.js) use the following configuration params: - * { - * "type": "logFaces-UDP", // must be present for instantiation - * "application": "LFS-TEST", // name of the application (domain) - * "remoteHost": "127.0.0.1", // logFaces server address (hostname) - * "port": 55201 // UDP receiver listening port - * } - * - */ -function logFacesUDPAppender(config) { - const send = datagram(config); - - const appender = function log(event) { - // convert to logFaces compact json format - const lfsEvent = { - a: config.application || '', // application name - t: event.startTime.getTime(), // time stamp - p: event.level.levelStr, // level (priority) - g: event.categoryName, // logger name - m: format(event.data) // message text - }; - - // add context variables if exist - Object.keys(event.context).forEach((key) => { - lfsEvent[`p_${key}`] = event.context[key]; - }); - - // send to server - send(lfsEvent); - }; - - appender.deprecated = '@log4js-node/logfaces-udp'; - return appender; -} - -function configure(config) { - return logFacesUDPAppender(config); -} - -function wrapErrorsWithInspect(items) { - return items.map((item) => { - if ((item instanceof Error) && item.stack) { - return { - inspect: function () { - return `${util.format(item)}\n${item.stack}`; - } - }; - } - - return item; - }); -} - -function format(logData) { - const data = Array.isArray(logData) - ? 
logData - : Array.prototype.slice.call(arguments); - return util.format.apply(util, wrapErrorsWithInspect(data)); -} - -module.exports.configure = configure; diff --git a/test/tap/logFaces-UDP-test.js b/test/tap/logFaces-UDP-test.js deleted file mode 100644 index 2634dc7..0000000 --- a/test/tap/logFaces-UDP-test.js +++ /dev/null @@ -1,101 +0,0 @@ -'use strict'; - -const test = require('tap').test; -const sandbox = require('@log4js-node/sandboxed-module'); -const appender = require('../../lib/appenders/logFaces-UDP'); - -function setupLogging(category, options) { - const fakeDgram = { - createSocket: function (type) { - fakeDgram.type = type; - return { - send: function (buffer, start, end, port, host, cb) { - fakeDgram.buffer = buffer; - fakeDgram.start = start; - fakeDgram.end = end; - fakeDgram.port = port; - fakeDgram.host = host; - fakeDgram.cb = cb; - } - }; - } - }; - - const fakeConsole = { - log: () => {}, - error: function (msg, err) { - this.msg = msg; - this.err = err; - } - }; - - const log4js = sandbox.require('../../lib/log4js', { - requires: { - dgram: fakeDgram - }, - globals: { - console: fakeConsole - } - }); - - options.type = 'logFaces-UDP'; - log4js.configure({ - appenders: { - udp: options - }, - categories: { default: { appenders: ['udp'], level: 'trace' } } - }); - - return { - logger: log4js.getLogger(category), - dgram: fakeDgram, - console: fakeConsole - }; -} - -test('logFaces appender', (batch) => { - batch.test('should export a configure function', (t) => { - t.type(appender.configure, 'function'); - t.end(); - }); - - batch.test('when using UDP receivers', (t) => { - const setup = setupLogging('udpCategory', { - application: 'LFS-UDP', - remoteHost: '127.0.0.1', - port: 55201 - }); - - setup.logger.addContext('foo', 'bar'); - setup.logger.addContext('bar', 'foo'); - setup.logger.error('Log event #2'); - - t.test('an event should be sent', (assert) => { - const event = JSON.parse(setup.dgram.buffer.toString()); - assert.equal(event.a, 'LFS-UDP'); - assert.equal(event.m, 'Log event #2'); - assert.equal(event.g, 'udpCategory'); - assert.equal(event.p, 'ERROR'); - assert.equal(event.p_foo, 'bar'); - assert.equal(event.p_bar, 'foo'); - - // Assert timestamp, up to hours resolution. 
- const date = new Date(event.t); - assert.equal( - date.toISOString().substring(0, 14), - new Date().toISOString().substring(0, 14) - ); - assert.end(); - }); - - t.test('dgram errors should be sent to console.error', (assert) => { - setup.dgram.cb('something went wrong'); - assert.equal(setup.console.msg, 'log4js.logFacesUDPAppender error sending to 127.0.0.1:55201, error: '); - assert.equal(setup.console.err, 'something went wrong'); - assert.end(); - }); - t.end(); - }); - - batch.end(); -}); diff --git a/types/log4js.d.ts b/types/log4js.d.ts index 54d02c3..5b9aec6 100644 --- a/types/log4js.d.ts +++ b/types/log4js.d.ts @@ -168,16 +168,6 @@ export interface DateFileAppender { daysToKeep?: number; } -export interface LogFacesUDPAppender { - type: 'logFaces-UDP'; - // (defaults to ‘127.0.0.1’)- hostname or IP address of the logFaces receiver - remoteHost?: string; - // (defaults to 55201) - port the logFaces receiver is listening on - port?: number; - // (defaults to empty string) - used to identify your application’s logs - application?: string; -} - export interface LogLevelFilterAppender { type: 'logLevelFilter'; // the name of an appender, defined in the same configuration, that you want to filter @@ -236,7 +226,6 @@ export type Appender = CategoryFilterAppender | FileAppender | SyncfileAppender | DateFileAppender - | LogFacesUDPAppender | LogLevelFilterAppender | MultiFileAppender | MultiprocessAppender
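
The appenders deleted across these patches are not gone, only moved: each removed source file carries a `deprecated` field naming its standalone successor ('@log4js-node/rabbitmq', '@log4js-node/logstash-http', '@log4js-node/logfaces-http', '@log4js-node/logfaces-udp'). As a hedged migration sketch only — it assumes the standalone package can be referenced by its module name in the appender `type`, as log4js allows for external appenders, and it reuses the option names (url, application, timeout) and sample values from the configuration comment in the deleted lib/appenders/logFaces-HTTP.js rather than anything confirmed by this patch series — a project that depended on the in-core logFaces-HTTP appender would switch to something like:

  // migration sketch, not part of these patches:
  // npm install @log4js-node/logfaces-http, then reference it by module name.
  const log4js = require('log4js');

  log4js.configure({
    appenders: {
      lfs: {
        type: '@log4js-node/logfaces-http', // appender loaded from the npm package
        url: 'http://lfs-server/logs',      // logFaces receiver servlet URL
        application: 'LFS-TEST',            // application (domain) name
        timeout: 5000                       // HTTP timeout in ms (the old in-core default)
      }
    },
    categories: { default: { appenders: ['lfs'], level: 'info' } }
  });

  const logger = log4js.getLogger();
  logger.info('sent to logFaces over HTTP');

The same pattern applies to the other removed appenders: install the package named in the old `deprecated` field and use that package name as the `type` in the log4js configuration.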