From 5314b7795a05abdc22062eea523480fbaffc70e0 Mon Sep 17 00:00:00 2001 From: Alexander Murauer Date: Wed, 27 Jul 2016 15:14:17 +0200 Subject: [PATCH 01/24] added categoryname to logstash appender --- lib/appenders/logstashUDP.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/appenders/logstashUDP.js b/lib/appenders/logstashUDP.js index 504f3ee..a0e3df2 100644 --- a/lib/appenders/logstashUDP.js +++ b/lib/appenders/logstashUDP.js @@ -33,6 +33,7 @@ function logstashUDP (config, layout) { } } config.fields.level = loggingEvent.level.levelStr; + config.fields.category = loggingEvent.categoryName; var logObject = { "@version" : "1", From 0288f5cb9a0348e8dbd3c5e5ce8ebf6dd7f9e629 Mon Sep 17 00:00:00 2001 From: sparklton Date: Fri, 12 Aug 2016 18:02:40 +0300 Subject: [PATCH 02/24] Added HTTP based sending functionality logFacesAppender can now be used with either UDP or HTTP transport for sending out logs to server. HTTP option is added to specifically target client applications running in browsers. UDP option is mostly for using on server side. --- lib/appenders/logFacesAppender.js | 150 +++++++++++++++++----------- test/logFacesAppender-test.js | 158 +++++++++++++++--------------- 2 files changed, 171 insertions(+), 137 deletions(-) diff --git a/lib/appenders/logFacesAppender.js b/lib/appenders/logFacesAppender.js index 851510c..acd2708 100644 --- a/lib/appenders/logFacesAppender.js +++ b/lib/appenders/logFacesAppender.js @@ -1,71 +1,109 @@ -/** -* logFaces appender sends JSON formatted log events to logFaces server UDP receivers. -* Events contain the following properties: -* - application name (taken from configuration) -* - host name (taken from underlying os) -* - time stamp -* - level -* - logger name (e.g. category) -* - thread name (current process id) -* - message text -*/ - "use strict"; -var dgram = require('dgram'), - layouts = require('../layouts'), - os = require('os'), - util = require('util'); +var util = require('util'); +var context = {}; -try{ - var process = require('process'); -} -catch(error){ - //this module is optional as it may not be available - //in older versions of node.js, so ignore if it failes to load +function datagram(config){ + var sock = require('dgram').createSocket('udp4'); + var host = config.remoteHost || "127.0.0.1"; + var port = config.port || 55201; + + return function(event){ + var buff = new Buffer(JSON.stringify(event)); + sock.send(buff, 0, buff.length, port, host, function(err, bytes) { + if(err){ + console.error("log4js.logFacesAppender send to %s:%d failed, error: %s", host, port, err); + } + }); + } } -function logFacesAppender (config, layout) { - var lfsSock = dgram.createSocket('udp4'); - var localhost = ""; +function servlet(config){ + var axios = require('axios').create(); + axios.defaults.baseURL = config.url; + axios.defaults.timeout = config.timeout || 5000; + axios.defaults.headers = {'Content-Type': 'application/json'}; + axios.defaults.withCredentials = true; - if(os && os.hostname()) - localhost = os.hostname().toString(); + return function(lfsEvent){ + axios.post("", lfsEvent) + .then(function(response){ + if(response.status != 200){ + console.error("log4js.logFacesAppender post to %s failed: %d", config.url, response.status); + } + }) + .catch(function(response){ + console.error("log4js.logFacesAppender post to %s excepted: %s", config.url, response.status); + }); + } +} - var pid = ""; - if(process && process.pid) - pid = process.pid; +/** +* logFaces appender sends JSON formatted log events to logFaces receivers. 
+* There are two types of receivers targetted - raw UDP sockets and HTTP. +* For UDP (node.js) use the following configuration params: +* { +* "type": "logFacesAppender", +* "application": "LFS-TEST", // name of the application (domain) +* "remoteHost": "127.0.0.1", // logFaces server address (hostname) +* "port": 55201 // UDP receiver listening port +* } +* +* For HTTP (browsers or node.js) use the following configuration params: +* { +* "type": "logFacesAppender", +* "application": "LFS-TEST", // name of the application (domain) +* "url": "http://lfs-server/..", // logFaces receiver binding name +* } +*/ +function logFacesAppender(config) { + var send = config.send; + if(send == undefined){ + send = (config.url == undefined) ? datagram(config) : servlet(config); + } - return function log(loggingEvent) { - var lfsEvent = { - a: config.application || "", // application name - h: localhost, // this host name - t: loggingEvent.startTime.getTime(), // time stamp - p: loggingEvent.level.levelStr, // level (priority) - g: loggingEvent.categoryName, // logger name - r: pid, // thread (process id) - m: layout(loggingEvent) // message text - }; + return function log(event) { + // convert to logFaces compact json format + var lfsEvent = { + a: config.application || "", // application name + t: event.startTime.getTime(), // time stamp + p: event.level.levelStr, // level (priority) + g: event.categoryName, // logger name + m: format(event.data) // message text + } - var buffer = new Buffer(JSON.stringify(lfsEvent)); - var lfsHost = config.remoteHost || "127.0.0.1"; - var lfsPort = config.port || 55201; - lfsSock.send(buffer, 0, buffer.length, lfsPort, lfsHost, function(err, bytes) { - if(err) { - console.error("log4js.logFacesAppender send to %s:%d failed, error: %s", - config.host, config.port, util.inspect(err)); - } - }); - }; + // add context variables if exist + Object.keys(context).forEach(function(key) { + lfsEvent['p_' + key] = context[key]; + }); + + // send to server + send(lfsEvent); + }; } function configure(config) { - var layout; - if (config.layout) - layout = layouts.layout(config.layout.type, config.layout); - else - layout = layouts.layout("pattern", {"type": "pattern", "pattern": "%m"}); - return logFacesAppender(config, layout); + return logFacesAppender(config); +} + +function setContext(key, value){ + context[key] = value; +} + +function format(logData) { + var data = Array.isArray(logData) ? 
logData : Array.prototype.slice.call(arguments); + return util.format.apply(util, wrapErrorsWithInspect(data)); +} + +function wrapErrorsWithInspect(items) { + return items.map(function(item) { + if ((item instanceof Error) && item.stack) { + return { inspect: function() { return util.format(item) + '\n' + item.stack; } }; + } else { + return item; + } + }); } exports.appender = logFacesAppender; exports.configure = configure; +exports.setContext = setContext; diff --git a/test/logFacesAppender-test.js b/test/logFacesAppender-test.js index 3c2d62c..4949f9c 100644 --- a/test/logFacesAppender-test.js +++ b/test/logFacesAppender-test.js @@ -1,96 +1,92 @@ "use strict"; var vows = require('vows'), - assert = require('assert'), - log4js = require('../lib/log4js'), - sandbox = require('sandboxed-module'); +assert = require('assert'), +log4js = require('../lib/log4js'), +sandbox = require('sandboxed-module'); + +var log = log4js.getLogger('lfstest'); function setupLogging(category, options) { - var udpSent = {}; + var sent = {}; - var fakeDgram = { - createSocket: function (type) { - return { - send: function(buffer, offset, length, port, host, callback) { - udpSent.date = new Date(); - udpSent.host = host; - udpSent.port = port; - udpSent.length = length; - udpSent.offset = 0; - udpSent.buffer = buffer; - callback(undefined, length); - } - }; - } - }; + function fake(event){ + Object.keys(event).forEach(function(key) { + sent[key] = event[key]; + }); + } - var lfsModule = sandbox.require('../lib/appenders/logFacesAppender', { - requires: { - 'dgram': fakeDgram - } - }); - log4js.clearAppenders(); - log4js.addAppender(lfsModule.configure(options), category); + var lfsModule = require('../lib/appenders/logFacesAppender'); + options.send = fake; + log4js.clearAppenders(); + log4js.addAppender(lfsModule.configure(options), category); + lfsModule.setContext("foo", "bar"); + lfsModule.setContext("bar", "foo"); - return { - logger: log4js.getLogger(category), - results: udpSent - }; + return { + logger: log4js.getLogger(category), + results: sent + }; } -vows.describe('logFaces UDP appender').addBatch({ - 'when logging to logFaces UDP receiver': { - topic: function() { - var setup = setupLogging('myCategory', { - "type": "logFacesAppender", - "application": "LFS-TEST", - "remoteHost": "127.0.0.1", - "port": 55201, - "layout": { - "type": "pattern", - "pattern": "%m" - } - }); +vows.describe('logFaces appender').addBatch({ + 'when using HTTP receivers': { + topic: function() { + var setup = setupLogging('myCategory', { + "type": "logFacesAppender", + "application": "LFS-HTTP", + "url": "http://localhost/receivers/rx1" + }); - setup.logger.warn('Log event #1'); - return setup; - }, - 'an UDP packet should be sent': function (topic) { - assert.equal(topic.results.host, "127.0.0.1"); - assert.equal(topic.results.port, 55201); - assert.equal(topic.results.offset, 0); - var json = JSON.parse(topic.results.buffer.toString()); - assert.equal(json.a, 'LFS-TEST'); - assert.equal(json.m, 'Log event #1'); - assert.equal(json.g, 'myCategory'); - assert.equal(json.p, 'WARN'); + setup.logger.warn('Log event #1'); + return setup; + }, + 'an event should be sent': function (topic) { + var event = topic.results; + assert.equal(event.a, 'LFS-HTTP'); + assert.equal(event.m, 'Log event #1'); + assert.equal(event.g, 'myCategory'); + assert.equal(event.p, 'WARN'); + assert.equal(event.p_foo, 'bar'); + assert.equal(event.p_bar, 'foo'); - // Assert timestamp, up to hours resolution. 
- var date = new Date(json.t); - assert.equal( - date.toISOString().substring(0, 14), - topic.results.date.toISOString().substring(0, 14) - ); - } - }, + // Assert timestamp, up to hours resolution. + var date = new Date(event.t); + assert.equal( + date.toISOString().substring(0, 14), + new Date().toISOString().substring(0, 14) + ); + } + }, - 'when missing options': { - topic: function() { - var setup = setupLogging('myLogger', { - "type": "logFacesAppender", - }); - setup.logger.error('Log event #2'); - return setup; - }, - 'it sets some defaults': function (topic) { - assert.equal(topic.results.host, "127.0.0.1"); - assert.equal(topic.results.port, 55201); + 'when using UDP receivers': { + topic: function() { + var setup = setupLogging('udpCategory', { + "type": "logFacesAppender", + "application": "LFS-UDP", + "remoteHost": "127.0.0.1", + "port": 55201 + }); + + setup.logger.error('Log event #2'); + return setup; + }, + 'an event should be sent': function (topic) { + var event = topic.results; + assert.equal(event.a, 'LFS-UDP'); + assert.equal(event.m, 'Log event #2'); + assert.equal(event.g, 'udpCategory'); + assert.equal(event.p, 'ERROR'); + assert.equal(event.p_foo, 'bar'); + assert.equal(event.p_bar, 'foo'); + + // Assert timestamp, up to hours resolution. + var date = new Date(event.t); + assert.equal( + date.toISOString().substring(0, 14), + new Date().toISOString().substring(0, 14) + ); + } + } - var json = JSON.parse(topic.results.buffer.toString()); - assert.equal(json.a, ""); - assert.equal(json.m, 'Log event #2'); - assert.equal(json.g, 'myLogger'); - assert.equal(json.p, 'ERROR'); - } - } }).export(module); From bf0aa03b142baa3a4e9d28e8a0f9594db7c2644e Mon Sep 17 00:00:00 2001 From: sparklton Date: Fri, 12 Aug 2016 18:33:55 +0300 Subject: [PATCH 03/24] refactoring --- lib/appenders/logFacesAppender.js | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/lib/appenders/logFacesAppender.js b/lib/appenders/logFacesAppender.js index acd2708..283d68b 100644 --- a/lib/appenders/logFacesAppender.js +++ b/lib/appenders/logFacesAppender.js @@ -1,3 +1,5 @@ +/*jslint maxlen: 200 */ + "use strict"; var util = require('util'); var context = {}; @@ -11,7 +13,8 @@ function datagram(config){ var buff = new Buffer(JSON.stringify(event)); sock.send(buff, 0, buff.length, port, host, function(err, bytes) { if(err){ - console.error("log4js.logFacesAppender send to %s:%d failed, error: %s", host, port, err); + console.error("log4js.logFacesAppender failed to %s:%d, error: %s", + host, port, err); } }); } @@ -28,13 +31,15 @@ function servlet(config){ axios.post("", lfsEvent) .then(function(response){ if(response.status != 200){ - console.error("log4js.logFacesAppender post to %s failed: %d", config.url, response.status); + console.error("log4js.logFacesAppender post to %s failed: %d", + config.url, response.status); } }) .catch(function(response){ - console.error("log4js.logFacesAppender post to %s excepted: %s", config.url, response.status); + console.error("log4js.logFacesAppender post to %s excepted: %s", + config.url, response.status); }); - } + }; } /** @@ -57,8 +62,8 @@ function servlet(config){ */ function logFacesAppender(config) { var send = config.send; - if(send == undefined){ - send = (config.url == undefined) ? datagram(config) : servlet(config); + if(send === undefined){ + send = (config.url === undefined) ? 
datagram(config) : servlet(config); } return function log(event) { From 658f7c45c713d0f79832f137485adeb1e7947b77 Mon Sep 17 00:00:00 2001 From: sparklton Date: Fri, 12 Aug 2016 18:37:38 +0300 Subject: [PATCH 04/24] refactoring --- lib/appenders/logFacesAppender.js | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/lib/appenders/logFacesAppender.js b/lib/appenders/logFacesAppender.js index 283d68b..7f2c6dc 100644 --- a/lib/appenders/logFacesAppender.js +++ b/lib/appenders/logFacesAppender.js @@ -1,5 +1,3 @@ -/*jslint maxlen: 200 */ - "use strict"; var util = require('util'); var context = {}; @@ -13,11 +11,11 @@ function datagram(config){ var buff = new Buffer(JSON.stringify(event)); sock.send(buff, 0, buff.length, port, host, function(err, bytes) { if(err){ - console.error("log4js.logFacesAppender failed to %s:%d, error: %s", + console.error("log4js.logFacesAppender failed to %s:%d, error: %s", host, port, err); } }); - } + }; } function servlet(config){ @@ -74,7 +72,7 @@ function logFacesAppender(config) { p: event.level.levelStr, // level (priority) g: event.categoryName, // logger name m: format(event.data) // message text - } + }; // add context variables if exist Object.keys(context).forEach(function(key) { @@ -95,14 +93,17 @@ function setContext(key, value){ } function format(logData) { - var data = Array.isArray(logData) ? logData : Array.prototype.slice.call(arguments); + var data = Array.isArray(logData) ? + logData : Array.prototype.slice.call(arguments); return util.format.apply(util, wrapErrorsWithInspect(data)); } function wrapErrorsWithInspect(items) { return items.map(function(item) { if ((item instanceof Error) && item.stack) { - return { inspect: function() { return util.format(item) + '\n' + item.stack; } }; + return { inspect: function() { + return util.format(item) + '\n' + item.stack; + }}; } else { return item; } From b7cf2f1e92704d40645058148da4b066754791cc Mon Sep 17 00:00:00 2001 From: Jacob Hoffman-Andrews Date: Sun, 28 Aug 2016 14:40:47 -0400 Subject: [PATCH 05/24] Add close-on-SIGHUP behavior for logrotate. --- lib/appenders/file.js | 75 ++++++++++++++++++++++++++----------------- 1 file changed, 46 insertions(+), 29 deletions(-) diff --git a/lib/appenders/file.js b/lib/appenders/file.js index 6788d30..9c71edc 100644 --- a/lib/appenders/file.js +++ b/lib/appenders/file.js @@ -11,7 +11,16 @@ var layouts = require('../layouts') //close open files on process exit. process.on('exit', function() { openFiles.forEach(function (file) { - file.end(); + file.stream.end(); + }); +}); + +// On SIGHUP, close and reopen all files. This allows this appender to work with +// logrotate. Note that if you are using logrotate, you should not set +// `logSize`. +process.on('SIGHUP', function() { + openFiles.forEach(function(writer) { + writer.reopen(); }); }); @@ -34,38 +43,45 @@ function fileAppender (file, layout, logSize, numBackups) { //there has to be at least one backup if logSize has been specified numBackups = numBackups === 0 ? 
1 : numBackups; - function openTheStream(file, fileSize, numFiles) { - var stream; - if (fileSize) { - stream = new streams.RollingFileStream( - file, - fileSize, - numFiles - ); - } else { - stream = fs.createWriteStream( - file, - { encoding: "utf8", - mode: parseInt('0644', 8), - flags: 'a' } - ); + var writer = { + stream: openTheStream(file, logSize, numBackups), + reopen: function() { + this.stream.end(); + this.stream = openTheStream(file, logSize, numBackups); } - stream.on("error", function (err) { - console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err); - }); - return stream; } - var logFile = openTheStream(file, logSize, numBackups); - // push file to the stack of open handlers - openFiles.push(logFile); - + openFiles.push(writer); + return function(loggingEvent) { - logFile.write(layout(loggingEvent) + eol, "utf8"); + writer.stream.write(layout(loggingEvent) + eol, "utf8"); }; } +function openTheStream(file, fileSize, numFiles) { + var stream; + if (fileSize) { + stream = new streams.RollingFileStream( + file, + fileSize, + numFiles + ); + } else { + stream = fs.createWriteStream( + file, + { encoding: "utf8", + mode: parseInt('0644', 8), + flags: 'a' } + ); + } + stream.on("error", function (err) { + console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err); + }); + return stream; +} + + function configure(config, options) { var layout; if (config.layout) { @@ -81,12 +97,13 @@ function configure(config, options) { function shutdown(cb) { async.each(openFiles, function(file, done) { - if (!file.write(eol, "utf-8")) { - file.once('drain', function() { - file.end(done); + var stream = file.stream; + if (!stream.write(eol, "utf-8")) { + stream.once('drain', function() { + stream.end(done); }); } else { - file.end(done); + stream.end(done); } }, cb); } From afd42bd3d925e5e7a2e452e5ee527775dc283433 Mon Sep 17 00:00:00 2001 From: chouhom Date: Fri, 23 Sep 2016 15:28:17 +0800 Subject: [PATCH 06/24] update moduel location --- examples/memory-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/memory-test.js b/examples/memory-test.js index ac2ae04..4cc6f2d 100644 --- a/examples/memory-test.js +++ b/examples/memory-test.js @@ -1,4 +1,4 @@ -var log4js = require('./lib/log4js') +var log4js = require('../lib/log4js') , logger , usage , i; From 2b84671471667a59bf3a7c3b61dad64ae127c7a9 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sun, 23 Oct 2016 11:50:53 +1100 Subject: [PATCH 07/24] changed default appender to stdout, replaced streams with streamroller, started removal of vows tests --- lib/appenders/dateFile.js | 2 +- lib/appenders/file.js | 2 +- lib/appenders/fileSync.js | 3 +- lib/appenders/gelf.js | 2 +- lib/appenders/stdout.js | 21 ++ lib/debug.js | 15 -- lib/log4js.js | 32 ++- lib/logger.js | 15 +- lib/streams/BaseRollingFileStream.js | 94 -------- lib/streams/DateRollingFileStream.js | 91 -------- lib/streams/RollingFileStream.js | 117 ---------- lib/streams/index.js | 3 - package.json | 9 +- test/debug-test.js | 72 ------- test/stderrAppender-test.js | 35 --- test/streams/BaseRollingFileStream-test.js | 93 -------- test/streams/DateRollingFileStream-test.js | 227 -------------------- test/streams/rollingFileStream-test.js | 207 ------------------ test/tape/default-settings-test.js | 33 +++ test/tape/stderrAppender-test.js | 22 ++ test/tape/stdoutAppender-test.js | 22 ++ test/{ => vows}/categoryFilter-test.js | 8 +- test/{ => vows}/clusteredAppender-test.js | 8 +- test/{ => 
vows}/configuration-test.js | 12 +- test/{ => vows}/configureNoLevels-test.js | 4 +- test/{ => vows}/connect-logger-test.js | 4 +- test/{ => vows}/consoleAppender-test.js | 8 +- test/{ => vows}/dateFileAppender-test.js | 76 +++---- test/{ => vows}/date_format-test.js | 2 +- test/{ => vows}/fileAppender-test.js | 36 ++-- test/{ => vows}/fileSyncAppender-test.js | 22 +- test/{ => vows}/gelfAppender-test.js | 6 +- test/{ => vows}/global-log-level-test.js | 2 +- test/{ => vows}/hipchatAppender-test.js | 4 +- test/{ => vows}/layouts-test.js | 40 ++-- test/{ => vows}/levels-test.js | 2 +- test/{ => vows}/log-abspath-test.js | 18 +- test/{ => vows}/log4js.json | 0 test/{ => vows}/logFacesAppender-test.js | 4 +- test/{ => vows}/logLevelFilter-test.js | 8 +- test/{ => vows}/logger-test.js | 6 +- test/{ => vows}/logging-test.js | 40 ++-- test/{ => vows}/logglyAppender-test.js | 4 +- test/{ => vows}/logstashUDP-test.js | 4 +- test/{ => vows}/mailgunAppender-test.js | 4 +- test/{ => vows}/multiprocess-test.js | 12 +- test/{ => vows}/newLevel-test.js | 8 +- test/{ => vows}/nolog-test.js | 4 +- test/{ => vows}/reloadConfiguration-test.js | 16 +- test/{ => vows}/setLevel-asymmetry-test.js | 2 +- test/{ => vows}/slackAppender-test.js | 4 +- test/{ => vows}/smtpAppender-test.js | 4 +- test/{ => vows}/subcategories-test.js | 4 +- test/vows/with-categoryFilter.json | 23 ++ test/vows/with-dateFile.json | 17 ++ test/{ => vows}/with-log-rolling.json | 0 test/{ => vows}/with-logLevelFilter.json | 28 +-- test/with-categoryFilter.json | 23 -- test/with-dateFile.json | 17 -- 59 files changed, 381 insertions(+), 1220 deletions(-) create mode 100644 lib/appenders/stdout.js delete mode 100644 lib/debug.js delete mode 100644 lib/streams/BaseRollingFileStream.js delete mode 100644 lib/streams/DateRollingFileStream.js delete mode 100644 lib/streams/RollingFileStream.js delete mode 100644 lib/streams/index.js delete mode 100644 test/debug-test.js delete mode 100644 test/stderrAppender-test.js delete mode 100644 test/streams/BaseRollingFileStream-test.js delete mode 100644 test/streams/DateRollingFileStream-test.js delete mode 100644 test/streams/rollingFileStream-test.js create mode 100644 test/tape/default-settings-test.js create mode 100644 test/tape/stderrAppender-test.js create mode 100644 test/tape/stdoutAppender-test.js rename test/{ => vows}/categoryFilter-test.js (90%) rename test/{ => vows}/clusteredAppender-test.js (94%) rename test/{ => vows}/configuration-test.js (94%) rename test/{ => vows}/configureNoLevels-test.js (98%) rename test/{ => vows}/connect-logger-test.js (98%) rename test/{ => vows}/consoleAppender-test.js (82%) rename test/{ => vows}/dateFileAppender-test.js (84%) rename test/{ => vows}/date_format-test.js (97%) rename test/{ => vows}/fileAppender-test.js (93%) rename test/{ => vows}/fileSyncAppender-test.js (92%) rename test/{ => vows}/gelfAppender-test.js (97%) rename test/{ => vows}/global-log-level-test.js (98%) rename test/{ => vows}/hipchatAppender-test.js (97%) rename test/{ => vows}/layouts-test.js (92%) rename test/{ => vows}/levels-test.js (99%) rename test/{ => vows}/log-abspath-test.js (89%) rename test/{ => vows}/log4js.json (100%) rename test/{ => vows}/logFacesAppender-test.js (95%) rename test/{ => vows}/logLevelFilter-test.js (92%) rename test/{ => vows}/logger-test.js (94%) rename test/{ => vows}/logging-test.js (95%) rename test/{ => vows}/logglyAppender-test.js (96%) rename test/{ => vows}/logstashUDP-test.js (96%) rename test/{ => vows}/mailgunAppender-test.js (98%) rename 
test/{ => vows}/multiprocess-test.js (97%) rename test/{ => vows}/newLevel-test.js (95%) rename test/{ => vows}/nolog-test.js (99%) rename test/{ => vows}/reloadConfiguration-test.js (96%) rename test/{ => vows}/setLevel-asymmetry-test.js (98%) rename test/{ => vows}/slackAppender-test.js (97%) rename test/{ => vows}/smtpAppender-test.js (98%) rename test/{ => vows}/subcategories-test.js (97%) create mode 100644 test/vows/with-categoryFilter.json create mode 100644 test/vows/with-dateFile.json rename test/{ => vows}/with-log-rolling.json (100%) rename test/{ => vows}/with-logLevelFilter.json (55%) delete mode 100644 test/with-categoryFilter.json delete mode 100644 test/with-dateFile.json diff --git a/lib/appenders/dateFile.js b/lib/appenders/dateFile.js index 55c8fd8..86635d7 100644 --- a/lib/appenders/dateFile.js +++ b/lib/appenders/dateFile.js @@ -1,5 +1,5 @@ "use strict"; -var streams = require('../streams') +var streams = require('streamroller') , layouts = require('../layouts') , path = require('path') , os = require('os') diff --git a/lib/appenders/file.js b/lib/appenders/file.js index 6088377..6ee01d1 100644 --- a/lib/appenders/file.js +++ b/lib/appenders/file.js @@ -2,7 +2,7 @@ var layouts = require('../layouts') , path = require('path') , fs = require('fs') -, streams = require('../streams') +, streams = require('streamroller') , os = require('os') , eol = os.EOL || '\n' , openFiles = [] diff --git a/lib/appenders/fileSync.js b/lib/appenders/fileSync.js index a8befc9..ef4bfbb 100755 --- a/lib/appenders/fileSync.js +++ b/lib/appenders/fileSync.js @@ -1,9 +1,8 @@ "use strict"; -var debug = require('../debug')('fileSync') +var debug = require('debug')('log4js:fileSync') , layouts = require('../layouts') , path = require('path') , fs = require('fs') -, streams = require('../streams') , os = require('os') , eol = os.EOL || '\n' ; diff --git a/lib/appenders/gelf.js b/lib/appenders/gelf.js index a367ef5..4f4093d 100644 --- a/lib/appenders/gelf.js +++ b/lib/appenders/gelf.js @@ -4,7 +4,7 @@ var layouts = require('../layouts'); var levels = require('../levels'); var dgram = require('dgram'); var util = require('util'); -var debug = require('../debug')('GELF Appender'); +var debug = require('debug')('log4js:gelf'); var LOG_EMERG=0; // system is unusable var LOG_ALERT=1; // action must be taken immediately diff --git a/lib/appenders/stdout.js b/lib/appenders/stdout.js new file mode 100644 index 0000000..888b916 --- /dev/null +++ b/lib/appenders/stdout.js @@ -0,0 +1,21 @@ +"use strict"; + +var layouts = require('../layouts'); + +function stdoutAppender(layout, timezoneOffset) { + layout = layout || layouts.colouredLayout; + return function(loggingEvent) { + process.stdout.write(layout(loggingEvent, timezoneOffset) + '\n'); + }; +} + +function configure(config) { + var layout; + if (config.layout) { + layout = layouts.layout(config.layout.type, config.layout); + } + return stdoutAppender(layout, config.timezoneOffset); +} + +exports.appender = stdoutAppender; +exports.configure = configure; diff --git a/lib/debug.js b/lib/debug.js deleted file mode 100644 index e3e6581..0000000 --- a/lib/debug.js +++ /dev/null @@ -1,15 +0,0 @@ -"use strict"; - -module.exports = function(label) { - var debug; - - if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) { - debug = function(message) { - console.error('LOG4JS: (%s) %s', label, message); - }; - } else { - debug = function() { }; - } - - return debug; -}; diff --git a/lib/log4js.js b/lib/log4js.js index 629aed5..4db9dec 100644 --- 
a/lib/log4js.js +++ b/lib/log4js.js @@ -59,13 +59,11 @@ var events = require('events') , appenderShutdowns = {} , defaultConfig = { appenders: [ - { type: "console" } + { type: "stdout" } ], replaceConsole: false }; -require('./appenders/console'); - function hasLogger(logger) { return loggers.hasOwnProperty(logger); } @@ -115,7 +113,7 @@ function normalizeCategory (category) { return category + '.'; } -function doesLevelEntryContainsLogger (levelCategory, loggerCategory) { +function doesLevelEntryContainsLogger (levelCategory, loggerCategory) { var normalizedLevelCategory = normalizeCategory(levelCategory); var normalizedLoggerCategory = normalizeCategory(loggerCategory); return normalizedLoggerCategory.substring(0, normalizedLevelCategory.length) == normalizedLevelCategory; //jshint ignore:line @@ -160,7 +158,7 @@ function getLogger (loggerCategoryName) { } } /* jshint +W073 */ - + // Create the logger for this name if it doesn't already exist loggers[loggerCategoryName] = new Logger(loggerCategoryName, level); @@ -183,7 +181,7 @@ function getLogger (loggerCategoryName) { }); } } - + return loggers[loggerCategoryName]; } @@ -200,10 +198,10 @@ function addAppender () { if (Array.isArray(args[0])) { args = args[0]; } - + args.forEach(function(appenderCategory) { addAppenderToCategory(appender, appenderCategory); - + if (appenderCategory === ALL_CATEGORIES) { addAppenderToAllLoggers(appender); } else { @@ -213,7 +211,7 @@ function addAppender () { loggers[loggerCategory].addListener("log", appender); } } - + } }); } @@ -306,7 +304,7 @@ function configureOnceOff(config, options) { try { configureLevels(config.levels); configureAppenders(config.appenders, options); - + if (config.replaceConsole) { replaceConsole(); } else { @@ -314,7 +312,7 @@ function configureOnceOff(config, options) { } } catch (e) { throw new Error( - "Problem reading log4js config " + util.inspect(config) + + "Problem reading log4js config " + util.inspect(config) + ". 
Error was \"" + e.message + "\" (" + e.stack + ")" ); } @@ -324,7 +322,7 @@ function configureOnceOff(config, options) { function reloadConfiguration(options) { var mtime = getMTime(configState.filename); if (!mtime) return; - + if (configState.lastMTime && (mtime.getTime() > configState.lastMTime.getTime())) { configureOnceOff(loadConfigurationFile(configState.filename), options); } @@ -355,7 +353,7 @@ function configure(configurationFileOrObject, options) { var config = configurationFileOrObject; config = config || process.env.LOG4JS_CONFIG; options = options || {}; - + if (config === undefined || config === null || typeof(config) === 'string') { if (options.reloadSecs) { initReloadConfiguration(config, options); @@ -481,19 +479,19 @@ module.exports = { getLogger: getLogger, getDefaultLogger: getDefaultLogger, hasLogger: hasLogger, - + addAppender: addAppender, loadAppender: loadAppender, clearAppenders: clearAppenders, configure: configure, shutdown: shutdown, - + replaceConsole: replaceConsole, restoreConsole: restoreConsole, - + levels: levels, setGlobalLogLevel: setGlobalLogLevel, - + layouts: layouts, appenders: {}, appenderMakers: appenderMakers, diff --git a/lib/logger.js b/lib/logger.js index 984bd38..75ac7ae 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -66,11 +66,7 @@ Logger.prototype.isLevelEnabled = function(otherLevel) { return this.level.isLessThanOrEqualTo(otherLevel); }; -['Trace','Debug','Info','Warn','Error','Fatal', 'Mark'].forEach( - function(levelString) { - addLevelMethods(levelString); - } -); +['Trace','Debug','Info','Warn','Error','Fatal', 'Mark'].forEach(addLevelMethods); function addLevelMethods(level) { level = levels.toLevel(level); @@ -80,16 +76,13 @@ function addLevelMethods(level) { var isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1); Logger.prototype['is'+isLevelMethod+'Enabled'] = function() { - return this.isLevelEnabled(level.toString()); + return this.isLevelEnabled(level); }; Logger.prototype[levelMethod] = function () { if (logWritesEnabled && this.isLevelEnabled(level)) { var numArgs = arguments.length; - var args = new Array(numArgs); - for (var i = 0; i < numArgs; i++) { - args[i] = arguments[i]; - } + var args = Array.prototype.slice.call(arguments); this._log(level, args); } }; @@ -120,4 +113,4 @@ exports.LoggingEvent = LoggingEvent; exports.Logger = Logger; exports.disableAllLogWrites = disableAllLogWrites; exports.enableAllLogWrites = enableAllLogWrites; -exports.addLevelMethods = addLevelMethods; \ No newline at end of file +exports.addLevelMethods = addLevelMethods; diff --git a/lib/streams/BaseRollingFileStream.js b/lib/streams/BaseRollingFileStream.js deleted file mode 100644 index 9c441ad..0000000 --- a/lib/streams/BaseRollingFileStream.js +++ /dev/null @@ -1,94 +0,0 @@ -"use strict"; -var fs = require('fs') -, stream -, debug = require('../debug')('BaseRollingFileStream') -, util = require('util') -, semver = require('semver'); - -if (semver.satisfies(process.version, '>=0.10.0')) { - stream = require('stream'); -} else { - stream = require('readable-stream'); -} - -module.exports = BaseRollingFileStream; - -function BaseRollingFileStream(filename, options) { - debug("In BaseRollingFileStream"); - this.filename = filename; - this.options = options || {}; - this.options.encoding = this.options.encoding || 'utf8'; - this.options.mode = this.options.mode || parseInt('0644', 8); - this.options.flags = this.options.flags || 'a'; - - this.currentSize = 0; - - function currentFileSize(file) { - var fileSize = 0; - try { 
- fileSize = fs.statSync(file).size; - } catch (e) { - // file does not exist - } - return fileSize; - } - - function throwErrorIfArgumentsAreNotValid() { - if (!filename) { - throw new Error("You must specify a filename"); - } - } - - throwErrorIfArgumentsAreNotValid(); - debug("Calling BaseRollingFileStream.super"); - BaseRollingFileStream.super_.call(this); - this.openTheStream(); - this.currentSize = currentFileSize(this.filename); -} -util.inherits(BaseRollingFileStream, stream.Writable); - -BaseRollingFileStream.prototype._write = function(chunk, encoding, callback) { - var that = this; - function writeTheChunk() { - debug("writing the chunk to the underlying stream"); - that.currentSize += chunk.length; - try { - that.theStream.write(chunk, encoding, callback); - } - catch (err){ - debug(err); - callback(); - } - } - - debug("in _write"); - - if (this.shouldRoll()) { - this.currentSize = 0; - this.roll(this.filename, writeTheChunk); - } else { - writeTheChunk(); - } -}; - -BaseRollingFileStream.prototype.openTheStream = function(cb) { - debug("opening the underlying stream"); - this.theStream = fs.createWriteStream(this.filename, this.options); - if (cb) { - this.theStream.on("open", cb); - } -}; - -BaseRollingFileStream.prototype.closeTheStream = function(cb) { - debug("closing the underlying stream"); - this.theStream.end(cb); -}; - -BaseRollingFileStream.prototype.shouldRoll = function() { - return false; // default behaviour is never to roll -}; - -BaseRollingFileStream.prototype.roll = function(filename, callback) { - callback(); // default behaviour is not to do anything -}; - diff --git a/lib/streams/DateRollingFileStream.js b/lib/streams/DateRollingFileStream.js deleted file mode 100644 index 5ef2081..0000000 --- a/lib/streams/DateRollingFileStream.js +++ /dev/null @@ -1,91 +0,0 @@ -"use strict"; -var BaseRollingFileStream = require('./BaseRollingFileStream') -, debug = require('../debug')('DateRollingFileStream') -, format = require('../date_format') -, fs = require('fs') -, util = require('util'); - -module.exports = DateRollingFileStream; - -function findTimestampFromFileIfExists(filename, now) { - return fs.existsSync(filename) ? 
fs.statSync(filename).mtime : new Date(now()); -} - -function DateRollingFileStream(filename, pattern, options, now) { - debug("Now is " + now); - if (pattern && typeof(pattern) === 'object') { - now = options; - options = pattern; - pattern = null; - } - this.pattern = pattern || '.yyyy-MM-dd'; - this.now = now || Date.now; - this.lastTimeWeWroteSomething = format.asString( - this.pattern, - findTimestampFromFileIfExists(filename, this.now) - ); - - this.baseFilename = filename; - this.alwaysIncludePattern = false; - - if (options) { - if (options.alwaysIncludePattern) { - this.alwaysIncludePattern = true; - filename = this.baseFilename + this.lastTimeWeWroteSomething; - } - delete options.alwaysIncludePattern; - if (Object.keys(options).length === 0) { - options = null; - } - } - debug("this.now is " + this.now + ", now is " + now); - - DateRollingFileStream.super_.call(this, filename, options); -} -util.inherits(DateRollingFileStream, BaseRollingFileStream); - -DateRollingFileStream.prototype.shouldRoll = function() { - var lastTime = this.lastTimeWeWroteSomething, - thisTime = format.asString(this.pattern, new Date(this.now())); - - debug("DateRollingFileStream.shouldRoll with now = " + - this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime); - - this.lastTimeWeWroteSomething = thisTime; - this.previousTime = lastTime; - - return thisTime !== lastTime; -}; - -DateRollingFileStream.prototype.roll = function(filename, callback) { - var that = this; - - debug("Starting roll"); - - if (this.alwaysIncludePattern) { - this.filename = this.baseFilename + this.lastTimeWeWroteSomething; - this.closeTheStream(this.openTheStream.bind(this, callback)); - } else { - var newFilename = this.baseFilename + this.previousTime; - this.closeTheStream( - deleteAnyExistingFile.bind(null, - renameTheCurrentFile.bind(null, - this.openTheStream.bind(this, - callback)))); - } - - function deleteAnyExistingFile(cb) { - //on windows, you can get a EEXIST error if you rename a file to an existing file - //so, we'll try to delete the file we're renaming to first - fs.unlink(newFilename, function (err) { - //ignore err: if we could not delete, it's most likely that it doesn't exist - cb(); - }); - } - - function renameTheCurrentFile(cb) { - debug("Renaming the " + filename + " -> " + newFilename); - fs.rename(filename, newFilename, cb); - } - -}; diff --git a/lib/streams/RollingFileStream.js b/lib/streams/RollingFileStream.js deleted file mode 100644 index af1e52e..0000000 --- a/lib/streams/RollingFileStream.js +++ /dev/null @@ -1,117 +0,0 @@ -"use strict"; -var BaseRollingFileStream = require('./BaseRollingFileStream') -, debug = require('../debug')('RollingFileStream') -, util = require('util') -, path = require('path') -, child_process = require('child_process') -, zlib = require("zlib") -, fs = require('fs'); - -module.exports = RollingFileStream; - -function RollingFileStream (filename, size, backups, options) { - this.size = size; - this.backups = backups || 1; - - function throwErrorIfArgumentsAreNotValid() { - if (!filename || !size || size <= 0) { - throw new Error("You must specify a filename and file size"); - } - } - - throwErrorIfArgumentsAreNotValid(); - - RollingFileStream.super_.call(this, filename, options); -} -util.inherits(RollingFileStream, BaseRollingFileStream); - -RollingFileStream.prototype.shouldRoll = function() { - debug("should roll with current size " + this.currentSize + " and max size " + this.size); - return this.currentSize >= this.size; -}; - 
-RollingFileStream.prototype.roll = function(filename, callback) { - var that = this, - nameMatcher = new RegExp('^' + path.basename(filename)); - - function justTheseFiles (item) { - return nameMatcher.test(item); - } - - function index(filename_) { - debug('Calculating index of '+filename_); - return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0; - } - - function byIndex(a, b) { - if (index(a) > index(b)) { - return 1; - } else if (index(a) < index(b) ) { - return -1; - } else { - return 0; - } - } - - function compress (filename, cb) { - - var gzip = zlib.createGzip(); - var inp = fs.createReadStream(filename); - var out = fs.createWriteStream(filename+".gz"); - inp.pipe(gzip).pipe(out); - fs.unlink(filename, cb); - - } - - function increaseFileIndex (fileToRename, cb) { - var idx = index(fileToRename); - debug('Index of ' + fileToRename + ' is ' + idx); - if (idx < that.backups) { - - var ext = path.extname(fileToRename); - var destination = filename + '.' + (idx+1); - if (that.options.compress && /^gz$/.test(ext.substring(1))) { - destination+=ext; - } - //on windows, you can get a EEXIST error if you rename a file to an existing file - //so, we'll try to delete the file we're renaming to first - fs.unlink(destination, function (err) { - //ignore err: if we could not delete, it's most likely that it doesn't exist - debug('Renaming ' + fileToRename + ' -> ' + destination); - fs.rename(path.join(path.dirname(filename), fileToRename), destination, function(err) { - if (err) { - cb(err); - } else { - if (that.options.compress && ext!=".gz") { - compress(destination, cb); - } else { - cb(); - } - } - }); - }); - } else { - cb(); - } - } - - function renameTheFiles(cb) { - //roll the backups (rename file.n to file.n+1, where n <= numBackups) - debug("Renaming the old files"); - fs.readdir(path.dirname(filename), function (err, files) { - var filesToProcess = files.filter(justTheseFiles).sort(byIndex); - (function processOne(err) { - var file = filesToProcess.pop(); - if (!file || err) { return cb(err); } - increaseFileIndex(file, processOne); - })(); - }); - } - - debug("Rolling, rolling, rolling"); - this.closeTheStream( - renameTheFiles.bind(null, - this.openTheStream.bind(this, - callback))); - -}; diff --git a/lib/streams/index.js b/lib/streams/index.js deleted file mode 100644 index d8e026d..0000000 --- a/lib/streams/index.js +++ /dev/null @@ -1,3 +0,0 @@ -"use strict"; -exports.RollingFileStream = require('./RollingFileStream'); -exports.DateRollingFileStream = require('./DateRollingFileStream'); diff --git a/package.json b/package.json index 202a9b8..c061217 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "log4js", - "version": "0.6.38", + "version": "1.0.0", "description": "Port of Log4js to work with node.", "keywords": [ "logging", @@ -23,19 +23,20 @@ }, "scripts": { "pretest": "jshint lib/ test/", - "test": "vows" + "test": "tape 'test/tape/**/*.js' && vows test/vows/*.js" }, "directories": { "test": "test", "lib": "lib" }, "dependencies": { - "readable-stream": "~1.0.2", - "semver": "~4.3.3" + "debug": "^2.2.0", + "streamroller": "^0.1.0" }, "devDependencies": { "jshint": "^2.9.2", "sandboxed-module": "0.1.3", + "tape": "^4.6.2", "vows": "0.7.0" }, "browser": { diff --git a/test/debug-test.js b/test/debug-test.js deleted file mode 100644 index 92dd915..0000000 --- a/test/debug-test.js +++ /dev/null @@ -1,72 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, sandbox = 
require('sandboxed-module') -, fakeConsole = { - error: function(format, label, message) { - this.logged = [ format, label, message ]; - } -} -, globals = function(debugValue) { - return { - process: { - env: { - 'NODE_DEBUG': debugValue - } - }, - console: fakeConsole - }; -}; - -vows.describe('../lib/debug').addBatch({ - 'when NODE_DEBUG is set to log4js': { - topic: function() { - var debug = sandbox.require( - '../lib/debug', - { 'globals': globals('log4js') } - ); - - fakeConsole.logged = []; - debug('cheese')('biscuits'); - return fakeConsole.logged; - }, - 'it should log to console.error': function(logged) { - assert.equal(logged[0], 'LOG4JS: (%s) %s'); - assert.equal(logged[1], 'cheese'); - assert.equal(logged[2], 'biscuits'); - } - }, - - 'when NODE_DEBUG is set to not log4js': { - topic: function() { - var debug = sandbox.require( - '../lib/debug', - { globals: globals('other_module') } - ); - - fakeConsole.logged = []; - debug('cheese')('biscuits'); - return fakeConsole.logged; - }, - 'it should not log to console.error': function(logged) { - assert.equal(logged.length, 0); - } - }, - - 'when NODE_DEBUG is not set': { - topic: function() { - var debug = sandbox.require( - '../lib/debug', - { globals: globals(null) } - ); - - fakeConsole.logged = []; - debug('cheese')('biscuits'); - return fakeConsole.logged; - }, - 'it should not log to console.error': function(logged) { - assert.equal(logged.length, 0); - } - } - -}).exportTo(module); diff --git a/test/stderrAppender-test.js b/test/stderrAppender-test.js deleted file mode 100644 index c4244d1..0000000 --- a/test/stderrAppender-test.js +++ /dev/null @@ -1,35 +0,0 @@ -"use strict"; -var assert = require('assert') -, vows = require('vows') -, layouts = require('../lib/layouts') -, sandbox = require('sandboxed-module'); - -vows.describe('../lib/appenders/stderr').addBatch({ - 'appender': { - topic: function() { - var messages = [] - , fakeProcess = { - stderr: { - write: function(msg) { messages.push(msg); } - } - } - , appenderModule = sandbox.require( - '../lib/appenders/stderr', - { - globals: { - 'process': fakeProcess - } - } - ) - , appender = appenderModule.appender(layouts.messagePassThroughLayout); - - appender({ data: ["blah"] }); - return messages; - }, - - 'should output to stderr': function(messages) { - assert.equal(messages[0], 'blah\n'); - } - } - -}).exportTo(module); diff --git a/test/streams/BaseRollingFileStream-test.js b/test/streams/BaseRollingFileStream-test.js deleted file mode 100644 index a414d5a..0000000 --- a/test/streams/BaseRollingFileStream-test.js +++ /dev/null @@ -1,93 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, fs = require('fs') -, sandbox = require('sandboxed-module'); - -vows.describe('../../lib/streams/BaseRollingFileStream').addBatch({ - 'when node version < 0.10.0': { - topic: function() { - var streamLib = sandbox.load( - '../../lib/streams/BaseRollingFileStream', - { - globals: { - process: { - version: '0.8.11' - } - }, - requires: { - 'readable-stream': { - Writable: function() {} - } - } - } - ); - return streamLib.required; - }, - 'it should use readable-stream to maintain compatibility': function(required) { - assert.ok(required['readable-stream']); - assert.ok(!required.stream); - } - }, - - 'when node version > 0.10.0': { - topic: function() { - var streamLib = sandbox.load( - '../../lib/streams/BaseRollingFileStream', - { - globals: { - process: { - version: '0.10.1' - } - }, - requires: { - 'stream': { - Writable: function() {} - } - } - } - 
); - return streamLib.required; - }, - 'it should use the core stream module': function(required) { - assert.ok(required.stream); - assert.ok(!required['readable-stream']); - } - }, - - 'when no filename is passed': { - topic: require('../../lib/streams/BaseRollingFileStream'), - 'it should throw an error': function(BaseRollingFileStream) { - try { - new BaseRollingFileStream(); - assert.fail('should not get here'); - } catch (e) { - assert.ok(e); - } - } - }, - - 'default behaviour': { - topic: function() { - var BaseRollingFileStream = require('../../lib/streams/BaseRollingFileStream') - , stream = new BaseRollingFileStream('basetest.log'); - return stream; - }, - teardown: function() { - try { - fs.unlink('basetest.log'); - } catch (e) { - console.error("could not remove basetest.log", e); - } - }, - 'it should not want to roll': function(stream) { - assert.isFalse(stream.shouldRoll()); - }, - 'it should not roll': function(stream) { - var cbCalled = false; - //just calls the callback straight away, no async calls - stream.roll('basetest.log', function() { cbCalled = true; }); - assert.isTrue(cbCalled); - } - } -}).exportTo(module); diff --git a/test/streams/DateRollingFileStream-test.js b/test/streams/DateRollingFileStream-test.js deleted file mode 100644 index 33f014b..0000000 --- a/test/streams/DateRollingFileStream-test.js +++ /dev/null @@ -1,227 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, fs = require('fs') -, semver = require('semver') -, streams -, DateRollingFileStream -, testTime = new Date(2012, 8, 12, 10, 37, 11); - -if (semver.satisfies(process.version, '>=0.10.0')) { - streams = require('stream'); -} else { - streams = require('readable-stream'); -} -DateRollingFileStream = require('../../lib/streams').DateRollingFileStream; - -function cleanUp(filename) { - return function() { - fs.unlink(filename); - }; -} - -function now() { - return testTime.getTime(); -} - -vows.describe('DateRollingFileStream').addBatch({ - 'arguments': { - topic: new DateRollingFileStream( - __dirname + '/test-date-rolling-file-stream-1', - 'yyyy-mm-dd.hh' - ), - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'), - - 'should take a filename and a pattern and return a WritableStream': function(stream) { - assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1'); - assert.equal(stream.pattern, 'yyyy-mm-dd.hh'); - assert.instanceOf(stream, streams.Writable); - }, - 'with default settings for the underlying stream': function(stream) { - assert.equal(stream.theStream.mode, 420); - assert.equal(stream.theStream.flags, 'a'); - //encoding is not available on the underlying stream - //assert.equal(stream.encoding, 'utf8'); - } - }, - - 'default arguments': { - topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'), - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-2'), - - 'pattern should be .yyyy-MM-dd': function(stream) { - assert.equal(stream.pattern, '.yyyy-MM-dd'); - } - }, - - 'with stream arguments': { - topic: new DateRollingFileStream( - __dirname + '/test-date-rolling-file-stream-3', - 'yyyy-MM-dd', - { mode: parseInt('0666', 8) } - ), - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'), - - 'should pass them to the underlying stream': function(stream) { - assert.equal(stream.theStream.mode, parseInt('0666', 8)); - } - }, - - 'with stream arguments but no pattern': { - topic: new DateRollingFileStream( - __dirname + '/test-date-rolling-file-stream-4', - { mode: 
parseInt('0666', 8) } - ), - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'), - - 'should pass them to the underlying stream': function(stream) { - assert.equal(stream.theStream.mode, parseInt('0666', 8)); - }, - 'should use default pattern': function(stream) { - assert.equal(stream.pattern, '.yyyy-MM-dd'); - } - }, - - 'with a pattern of .yyyy-MM-dd': { - topic: function() { - var that = this, - stream = new DateRollingFileStream( - __dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', - null, - now - ); - stream.write("First message\n", 'utf8', function() { - that.callback(null, stream); - }); - }, - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'), - - 'should create a file with the base name': { - topic: function(stream) { - fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback); - }, - 'file should contain first message': function(result) { - assert.equal(result.toString(), "First message\n"); - } - }, - - 'when the day changes': { - topic: function(stream) { - testTime = new Date(2012, 8, 13, 0, 10, 12); - stream.write("Second message\n", 'utf8', this.callback); - }, - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'), - - - 'the number of files': { - topic: function() { - fs.readdir(__dirname, this.callback); - }, - 'should be two': function(files) { - assert.equal( - files.filter( - function(file) { - return file.indexOf('test-date-rolling-file-stream-5') > -1; - } - ).length, - 2 - ); - } - }, - - 'the file without a date': { - topic: function() { - fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback); - }, - 'should contain the second message': function(contents) { - assert.equal(contents.toString(), "Second message\n"); - } - }, - - 'the file with the date': { - topic: function() { - fs.readFile(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', this.callback); - }, - 'should contain the first message': function(contents) { - assert.equal(contents.toString(), "First message\n"); - } - } - } - }, - - 'with alwaysIncludePattern': { - topic: function() { - var that = this, - testTime = new Date(2012, 8, 12, 0, 10, 12), - stream = new DateRollingFileStream( - __dirname + '/test-date-rolling-file-stream-pattern', - '.yyyy-MM-dd', - {alwaysIncludePattern: true}, - now - ); - stream.write("First message\n", 'utf8', function() { - that.callback(null, stream); - }); - }, - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12'), - - 'should create a file with the pattern set': { - topic: function(stream) { - fs.readFile(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', this.callback); - }, - 'file should contain first message': function(result) { - assert.equal(result.toString(), "First message\n"); - } - }, - - 'when the day changes': { - topic: function(stream) { - testTime = new Date(2012, 8, 13, 0, 10, 12); - stream.write("Second message\n", 'utf8', this.callback); - }, - teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13'), - - - 'the number of files': { - topic: function() { - fs.readdir(__dirname, this.callback); - }, - 'should be two': function(files) { - assert.equal( - files.filter( - function(file) { - return file.indexOf('test-date-rolling-file-stream-pattern') > -1; - } - ).length, - 2 - ); - } - }, - - 'the file with the later date': { - topic: function() { - fs.readFile( - __dirname + '/test-date-rolling-file-stream-pattern.2012-09-13', - this.callback - ); - }, - 'should contain the 
second message': function(contents) { - assert.equal(contents.toString(), "Second message\n"); - } - }, - - 'the file with the date': { - topic: function() { - fs.readFile( - __dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', - this.callback - ); - }, - 'should contain the first message': function(contents) { - assert.equal(contents.toString(), "First message\n"); - } - } - } - } - -}).exportTo(module); diff --git a/test/streams/rollingFileStream-test.js b/test/streams/rollingFileStream-test.js deleted file mode 100644 index c3d9fc3..0000000 --- a/test/streams/rollingFileStream-test.js +++ /dev/null @@ -1,207 +0,0 @@ -"use strict"; -var vows = require('vows') -, assert = require('assert') -, events = require('events') -, fs = require('fs') -, semver = require('semver') -, streams -, RollingFileStream; - -if (semver.satisfies(process.version, '>=0.10.0')) { - streams = require('stream'); -} else { - streams = require('readable-stream'); -} -RollingFileStream = require('../../lib/streams').RollingFileStream; - -function remove(filename) { - try { - fs.unlinkSync(filename); - } catch (e) { - //doesn't really matter if it failed - } -} - -function create(filename) { - fs.writeFileSync(filename, "test file"); -} - -vows.describe('RollingFileStream').addBatch({ - 'arguments': { - topic: function() { - remove(__dirname + "/test-rolling-file-stream"); - return new RollingFileStream("test-rolling-file-stream", 1024, 5); - }, - 'should take a filename, file size (bytes), no. backups, return Writable': function(stream) { - assert.instanceOf(stream, streams.Writable); - assert.equal(stream.filename, "test-rolling-file-stream"); - assert.equal(stream.size, 1024); - assert.equal(stream.backups, 5); - }, - 'with default settings for the underlying stream': function(stream) { - assert.equal(stream.theStream.mode, 420); - assert.equal(stream.theStream.flags, 'a'); - //encoding isn't a property on the underlying stream - //assert.equal(stream.theStream.encoding, 'utf8'); - } - }, - 'with stream arguments': { - topic: function() { - remove(__dirname + '/test-rolling-file-stream'); - return new RollingFileStream( - 'test-rolling-file-stream', - 1024, - 5, - { mode: parseInt('0666', 8) } - ); - }, - 'should pass them to the underlying stream': function(stream) { - assert.equal(stream.theStream.mode, parseInt('0666', 8)); - } - }, - 'without size': { - topic: function() { - try { - new RollingFileStream(__dirname + "/test-rolling-file-stream"); - } catch (e) { - return e; - } - }, - 'should throw an error': function(err) { - assert.instanceOf(err, Error); - } - }, - 'without number of backups': { - topic: function() { - remove('test-rolling-file-stream'); - return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024); - }, - 'should default to 1 backup': function(stream) { - assert.equal(stream.backups, 1); - } - }, - 'writing less than the file size': { - topic: function() { - remove(__dirname + "/test-rolling-file-stream-write-less"); - var that = this - , stream = new RollingFileStream( - __dirname + "/test-rolling-file-stream-write-less", - 100 - ); - stream.write("cheese", "utf8", function() { - stream.end(); - fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback); - }); - }, - 'should write to the file': function(contents) { - assert.equal(contents, "cheese"); - }, - 'the number of files': { - topic: function() { - fs.readdir(__dirname, this.callback); - }, - 'should be one': function(files) { - assert.equal( - files.filter( - function(file) { 
- return file.indexOf('test-rolling-file-stream-write-less') > -1; - } - ).length, - 1 - ); - } - } - }, - 'writing more than the file size': { - topic: function() { - remove(__dirname + "/test-rolling-file-stream-write-more"); - remove(__dirname + "/test-rolling-file-stream-write-more.1"); - var that = this - , stream = new RollingFileStream( - __dirname + "/test-rolling-file-stream-write-more", - 45 - ); - - write7Cheese(that, stream); - }, - 'the number of files': { - topic: function() { - fs.readdir(__dirname, this.callback); - }, - 'should be two': function(files) { - assert.equal(files.filter( - function(file) { - return file.indexOf('test-rolling-file-stream-write-more') > -1; - } - ).length, 2); - } - }, - 'the first file': { - topic: function() { - fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback); - }, - 'should contain the last two log messages': function(contents) { - assert.equal(contents, '5.cheese\n6.cheese\n'); - } - }, - 'the second file': { - topic: function() { - fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback); - }, - 'should contain the first five log messages': function(contents) { - assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n'); - } - } - }, - 'when many files already exist': { - topic: function() { - remove(__dirname + '/test-rolling-stream-with-existing-files.11'); - remove(__dirname + '/test-rolling-stream-with-existing-files.20'); - remove(__dirname + '/test-rolling-stream-with-existing-files.-1'); - remove(__dirname + '/test-rolling-stream-with-existing-files.1.1'); - remove(__dirname + '/test-rolling-stream-with-existing-files.1'); - - - create(__dirname + '/test-rolling-stream-with-existing-files.11'); - create(__dirname + '/test-rolling-stream-with-existing-files.20'); - create(__dirname + '/test-rolling-stream-with-existing-files.-1'); - create(__dirname + '/test-rolling-stream-with-existing-files.1.1'); - create(__dirname + '/test-rolling-stream-with-existing-files.1'); - - var that = this - , stream = new RollingFileStream( - __dirname + "/test-rolling-stream-with-existing-files", - 45, - 5 - ); - - write7Cheese(that, stream); - }, - 'the files': { - topic: function() { - fs.readdir(__dirname, this.callback); - }, - 'should be rolled': function(files) { - assert.include(files, 'test-rolling-stream-with-existing-files'); - assert.include(files, 'test-rolling-stream-with-existing-files.1'); - assert.include(files, 'test-rolling-stream-with-existing-files.2'); - assert.include(files, 'test-rolling-stream-with-existing-files.11'); - assert.include(files, 'test-rolling-stream-with-existing-files.20'); - } - } - } -}).exportTo(module); - -function write7Cheese(that, stream) { - var streamed = 0; - [0, 1, 2, 3, 4, 5, 6].forEach(function(i) { - stream.write(i +".cheese\n", "utf8", function(e) { - streamed++; - if (e) { return that.callback(e); } - if (streamed === 7) { - stream.end(); - that.callback(); - } - }); - }); -} diff --git a/test/tape/default-settings-test.js b/test/tape/default-settings-test.js new file mode 100644 index 0000000..81f61f7 --- /dev/null +++ b/test/tape/default-settings-test.js @@ -0,0 +1,33 @@ +"use strict"; +var test = require('tape') +, sandbox = require('sandboxed-module'); + +test('default settings', function(t) { + var output = [] + , log4js = sandbox.require( + '../../lib/log4js', + { + requires: { + './appenders/stdout': { + 'name': 'stdout', + 'appender': function () { + return function(evt) { + output.push(evt); + }; + }, + 
'configure': function (config) { + return this.appender(); + } + } + } + } + ) + , logger = log4js.getLogger("default-settings"); + + logger.info("This should go to stdout."); + + t.plan(2); + t.equal(output.length, 1, "It should log to stdout."); + t.equal(output[0].data[0], "This should go to stdout.", "It should log the message."); + t.end(); +}); diff --git a/test/tape/stderrAppender-test.js b/test/tape/stderrAppender-test.js new file mode 100644 index 0000000..1c949e3 --- /dev/null +++ b/test/tape/stderrAppender-test.js @@ -0,0 +1,22 @@ +"use strict"; +var test = require('tape') +, layouts = require('../../lib/layouts') +, sandbox = require('sandboxed-module'); + +test('stderr appender', function(t) { + var output = [] + , appender = sandbox.require( + '../../lib/appenders/stderr', + { + globals: { + process: { stderr: { write : function(data) { output.push(data); } } } + } + } + ).appender(layouts.messagePassThroughLayout); + + appender({ data: ["biscuits"] }); + t.plan(2); + t.equal(output.length, 1, 'There should be one message.'); + t.equal(output[0], 'biscuits\n', 'The message should be biscuits.'); + t.end(); +}); diff --git a/test/tape/stdoutAppender-test.js b/test/tape/stdoutAppender-test.js new file mode 100644 index 0000000..32c9b83 --- /dev/null +++ b/test/tape/stdoutAppender-test.js @@ -0,0 +1,22 @@ +"use strict"; +var test = require('tape') +, layouts = require('../../lib/layouts') +, sandbox = require('sandboxed-module'); + +test('stdout appender', function(t) { + var output = [] + , appender = sandbox.require( + '../../lib/appenders/stdout', + { + globals: { + process: { stdout: { write : function(data) { output.push(data); } } } + } + } + ).appender(layouts.messagePassThroughLayout); + + appender({ data: ["cheese"] }); + t.plan(2); + t.equal(output.length, 1, 'There should be one message.'); + t.equal(output[0], 'cheese\n', 'The message should be cheese.'); + t.end(); +}); diff --git a/test/categoryFilter-test.js b/test/vows/categoryFilter-test.js similarity index 90% rename from test/categoryFilter-test.js rename to test/vows/categoryFilter-test.js index 15a7b90..4a4c4ff 100644 --- a/test/categoryFilter-test.js +++ b/test/vows/categoryFilter-test.js @@ -17,9 +17,9 @@ vows.describe('log4js categoryFilter').addBatch({ 'appender': { topic: function() { - var log4js = require('../lib/log4js'), logEvents = [], webLogger, appLogger; + var log4js = require('../../lib/log4js'), logEvents = [], webLogger, appLogger; log4js.clearAppenders(); - var appender = require('../lib/appenders/categoryFilter') + var appender = require('../../lib/appenders/categoryFilter') .appender( ['app'], function(evt) { logEvents.push(evt); } @@ -45,13 +45,13 @@ vows.describe('log4js categoryFilter').addBatch({ 'configure': { topic: function() { - var log4js = require('../lib/log4js') + var log4js = require('../../lib/log4js') , logger, weblogger; remove(__dirname + '/categoryFilter-web.log'); remove(__dirname + '/categoryFilter-noweb.log'); - log4js.configure('test/with-categoryFilter.json'); + log4js.configure('test/vows/with-categoryFilter.json'); logger = log4js.getLogger("app"); weblogger = log4js.getLogger("web"); diff --git a/test/clusteredAppender-test.js b/test/vows/clusteredAppender-test.js similarity index 94% rename from test/clusteredAppender-test.js rename to test/vows/clusteredAppender-test.js index 76cb37a..5de0e4a 100755 --- a/test/clusteredAppender-test.js +++ b/test/vows/clusteredAppender-test.js @@ -1,9 +1,9 @@ "use strict"; var assert = require('assert'); var vows = 
require('vows'); -var layouts = require('../lib/layouts'); +var layouts = require('../../lib/layouts'); var sandbox = require('sandboxed-module'); -var LoggingEvent = require('../lib/logger').LoggingEvent; +var LoggingEvent = require('../../lib/logger').LoggingEvent; var cluster = require('cluster'); vows.describe('log4js cluster appender').addBatch({ @@ -42,7 +42,7 @@ vows.describe('log4js cluster appender').addBatch({ }; // Load appender and fake modules in it - var appenderModule = sandbox.require('../lib/appenders/clustered', { + var appenderModule = sandbox.require('../../lib/appenders/clustered', { requires: { 'cluster': fakeCluster, } @@ -119,7 +119,7 @@ vows.describe('log4js cluster appender').addBatch({ }; // Load appender and fake modules in it - var appenderModule = sandbox.require('../lib/appenders/clustered', { + var appenderModule = sandbox.require('../../lib/appenders/clustered', { requires: { 'cluster': fakeCluster, }, diff --git a/test/configuration-test.js b/test/vows/configuration-test.js similarity index 94% rename from test/configuration-test.js rename to test/vows/configuration-test.js index ddbf7df..aa22f26 100644 --- a/test/configuration-test.js +++ b/test/vows/configuration-test.js @@ -24,7 +24,7 @@ vows.describe('log4js configure').addBatch({ topic: function() { var testAppender = makeTestAppender(), log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { './appenders/cheese': testAppender @@ -55,14 +55,14 @@ vows.describe('log4js configure').addBatch({ topic: function() { var testAppender = makeTestAppender(), log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { './appenders/cheese': testAppender } } ); log4js.loadAppender('cheese'); return log4js; }, - 'should load appender from ../lib/appenders': function(log4js) { + 'should load appender from ../../lib/appenders': function(log4js) { assert.ok(log4js.appenders.cheese); }, 'should add appender configure function to appenderMakers' : function(log4js) { @@ -73,7 +73,7 @@ vows.describe('log4js configure').addBatch({ topic: function() { var testAppender = makeTestAppender(), log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { 'some/other/external': testAppender } } ); log4js.loadAppender('some/other/external'); @@ -89,7 +89,7 @@ vows.describe('log4js configure').addBatch({ 'when appender object loaded via loadAppender': { topic: function() { var testAppender = makeTestAppender(), - log4js = sandbox.require('../lib/log4js'); + log4js = sandbox.require('../../lib/log4js'); log4js.loadAppender('some/other/external', testAppender); return log4js; @@ -131,7 +131,7 @@ vows.describe('log4js configure').addBatch({ } }, log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { 'fs': fakeFS, diff --git a/test/configureNoLevels-test.js b/test/vows/configureNoLevels-test.js similarity index 98% rename from test/configureNoLevels-test.js rename to test/vows/configureNoLevels-test.js index 55bd987..2c63e2f 100644 --- a/test/configureNoLevels-test.js +++ b/test/vows/configureNoLevels-test.js @@ -10,7 +10,7 @@ // Basic set up var vows = require('vows'); var assert = require('assert'); -var toLevel = require('../lib/levels').toLevel; +var toLevel = require('../../lib/levels').toLevel; // uncomment one or other of the following to see progress (or not) while running the tests // var showProgress = console.log; @@ -47,7 +47,7 @@ function getLoggerName(level) { function getTopLevelContext(nop, configToTest, name) { return { topic: 
function() { - var log4js = require('../lib/log4js'); + var log4js = require('../../lib/log4js'); // create loggers for each level, // keeping the level in the logger's name for traceability strLevels.forEach(function(l) { diff --git a/test/connect-logger-test.js b/test/vows/connect-logger-test.js similarity index 98% rename from test/connect-logger-test.js rename to test/vows/connect-logger-test.js index 9fda257..c81d550 100644 --- a/test/connect-logger-test.js +++ b/test/vows/connect-logger-test.js @@ -4,7 +4,7 @@ var vows = require('vows') , assert = require('assert') , util = require('util') , EE = require('events').EventEmitter -, levels = require('../lib/levels'); +, levels = require('../../lib/levels'); function MockLogger() { @@ -63,7 +63,7 @@ function request(cl, method, url, code, reqHeaders, resHeaders) { vows.describe('log4js connect logger').addBatch({ 'getConnectLoggerModule': { topic: function() { - var clm = require('../lib/connect-logger'); + var clm = require('../../lib/connect-logger'); return clm; }, diff --git a/test/consoleAppender-test.js b/test/vows/consoleAppender-test.js similarity index 82% rename from test/consoleAppender-test.js rename to test/vows/consoleAppender-test.js index 3887ce5..9ac24c4 100644 --- a/test/consoleAppender-test.js +++ b/test/vows/consoleAppender-test.js @@ -1,10 +1,10 @@ "use strict"; var assert = require('assert') , vows = require('vows') -, layouts = require('../lib/layouts') +, layouts = require('../../lib/layouts') , sandbox = require('sandboxed-module'); -vows.describe('../lib/appenders/console').addBatch({ +vows.describe('../../lib/appenders/console').addBatch({ 'appender': { topic: function() { var messages = [] @@ -12,7 +12,7 @@ vows.describe('../lib/appenders/console').addBatch({ log: function(msg) { messages.push(msg); } } , appenderModule = sandbox.require( - '../lib/appenders/console', + '../../lib/appenders/console', { globals: { 'console': fakeConsole @@ -29,5 +29,5 @@ vows.describe('../lib/appenders/console').addBatch({ assert.equal(messages[0], 'blah'); } } - + }).exportTo(module); diff --git a/test/dateFileAppender-test.js b/test/vows/dateFileAppender-test.js similarity index 84% rename from test/dateFileAppender-test.js rename to test/vows/dateFileAppender-test.js index 8fa115f..5bea92e 100644 --- a/test/dateFileAppender-test.js +++ b/test/vows/dateFileAppender-test.js @@ -4,7 +4,7 @@ var vows = require('vows') , path = require('path') , fs = require('fs') , sandbox = require('sandboxed-module') -, log4js = require('../lib/log4js') +, log4js = require('../../lib/log4js') , EOL = require('os').EOL || '\n'; function removeFile(filename) { @@ -17,20 +17,20 @@ function removeFile(filename) { }; } -vows.describe('../lib/appenders/dateFile').addBatch({ +vows.describe('../../lib/appenders/dateFile').addBatch({ 'appender': { 'adding multiple dateFileAppenders': { topic: function () { var listenersCount = process.listeners('exit').length, - dateFileAppender = require('../lib/appenders/dateFile'), + dateFileAppender = require('../../lib/appenders/dateFile'), count = 5, logfile; - + while (count--) { logfile = path.join(__dirname, 'datefa-default-test' + count + '.log'); log4js.addAppender(dateFileAppender.appender(logfile)); } - + return listenersCount; }, teardown: function() { @@ -40,7 +40,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({ removeFile('datefa-default-test3.log')(); removeFile('datefa-default-test4.log')(); }, - + 'should only add one `exit` listener': function (initialCount) { 
assert.equal(process.listeners('exit').length, initialCount + 1); }, @@ -52,7 +52,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({ var exitListener , openedFiles = [] , dateFileAppender = sandbox.require( - '../lib/appenders/dateFile', + '../../lib/appenders/dateFile', { globals: { process: { @@ -62,7 +62,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({ } }, requires: { - '../streams': { + 'streamroller': { DateRollingFileStream: function(filename) { openedFiles.push(filename); @@ -71,7 +71,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({ }; } } - } + } } ); for (var i=0; i < 5; i += 1) { @@ -85,55 +85,55 @@ vows.describe('../lib/appenders/dateFile').addBatch({ assert.isEmpty(openedFiles); } }, - + 'with default settings': { topic: function() { var that = this, testFile = path.join(__dirname, 'date-appender-default.log'), - appender = require('../lib/appenders/dateFile').appender(testFile), + appender = require('../../lib/appenders/dateFile').appender(testFile), logger = log4js.getLogger('default-settings'); log4js.clearAppenders(); log4js.addAppender(appender, 'default-settings'); - + logger.info("This should be in the file."); - + setTimeout(function() { fs.readFile(testFile, "utf8", that.callback); }, 100); - + }, teardown: removeFile('date-appender-default.log'), - + 'should write to the file': function(contents) { assert.include(contents, 'This should be in the file'); }, - + 'should use the basic layout': function(contents) { assert.match( - contents, + contents, /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - / ); } } - + } }).addBatch({ 'configure': { 'with dateFileAppender': { topic: function() { - var log4js = require('../lib/log4js') + var log4js = require('../../lib/log4js') , logger; //this config file defines one file appender (to ./date-file-test.log) //and sets the log level for "tests" to WARN - log4js.configure('test/with-dateFile.json'); + log4js.configure('test/vows/with-dateFile.json'); logger = log4js.getLogger('tests'); logger.info('this should not be written to the file'); logger.warn('this should be written to the file'); - + fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', this.callback); }, teardown: removeFile('date-file-test.log'), - + 'should load appender configuration from a json file': function(err, contents) { if (err) { throw err; @@ -145,26 +145,26 @@ vows.describe('../lib/appenders/dateFile').addBatch({ 'with options.alwaysIncludePattern': { topic: function() { var self = this - , log4js = require('../lib/log4js') - , format = require('../lib/date_format') + , log4js = require('../../lib/log4js') + , format = require('../../lib/date_format') , logger , options = { "appenders": [ { - "category": "tests", - "type": "dateFile", - "filename": "test/date-file-test", + "category": "tests", + "type": "dateFile", + "filename": "test/vows/date-file-test", "pattern": "-from-MM-dd.log", "alwaysIncludePattern": true, - "layout": { - "type": "messagePassThrough" + "layout": { + "type": "messagePassThrough" } } ] } , thisTime = format.asString(options.appenders[0].pattern, new Date()); fs.writeFileSync( - path.join(__dirname, 'date-file-test' + thisTime), + path.join(__dirname, 'date-file-test' + thisTime), "this is existing data" + EOL, 'utf8' ); @@ -189,10 +189,10 @@ vows.describe('../lib/appenders/dateFile').addBatch({ topic: function () { var fileOpened, appender = sandbox.require( - '../lib/appenders/dateFile', + '../../lib/appenders/dateFile', { requires: - { '../streams': - { 
DateRollingFileStream: + { 'streamroller': + { DateRollingFileStream: function(file) { fileOpened = file; return { @@ -205,10 +205,10 @@ vows.describe('../lib/appenders/dateFile').addBatch({ } ); appender.configure( - { - filename: "whatever.log", - maxLogSize: 10 - }, + { + filename: "whatever.log", + maxLogSize: 10 + }, { cwd: '/absolute/path/to' } ); return fileOpened; @@ -218,6 +218,6 @@ vows.describe('../lib/appenders/dateFile').addBatch({ assert.equal(fileOpened, expected); } } - + } }).exportTo(module); diff --git a/test/date_format-test.js b/test/vows/date_format-test.js similarity index 97% rename from test/date_format-test.js rename to test/vows/date_format-test.js index 04adb08..02a545a 100644 --- a/test/date_format-test.js +++ b/test/vows/date_format-test.js @@ -1,7 +1,7 @@ "use strict"; var vows = require('vows') , assert = require('assert') -, dateFormat = require('../lib/date_format'); +, dateFormat = require('../../lib/date_format'); function createFixedDate() { return new Date(2010, 0, 11, 14, 31, 30, 5); diff --git a/test/fileAppender-test.js b/test/vows/fileAppender-test.js similarity index 93% rename from test/fileAppender-test.js rename to test/vows/fileAppender-test.js index 007dbbd..e897b77 100644 --- a/test/fileAppender-test.js +++ b/test/vows/fileAppender-test.js @@ -3,7 +3,7 @@ var vows = require('vows') , fs = require('fs') , path = require('path') , sandbox = require('sandboxed-module') -, log4js = require('../lib/log4js') +, log4js = require('../../lib/log4js') , assert = require('assert') , zlib = require('zlib') , EOL = require('os').EOL || '\n'; @@ -27,7 +27,10 @@ vows.describe('log4js fileAppender').addBatch({ while (count--) { logfile = path.join(__dirname, '/fa-default-test' + count + '.log'); - log4js.addAppender(require('../lib/appenders/file').appender(logfile), 'default-settings'); + log4js.addAppender( + require('../../lib/appenders/file').appender(logfile), + 'default-settings' + ); } return listenersCount; @@ -43,7 +46,7 @@ vows.describe('log4js fileAppender').addBatch({ var exitListener , openedFiles = [] , fileAppender = sandbox.require( - '../lib/appenders/file', + '../../lib/appenders/file', { globals: { process: { @@ -53,7 +56,7 @@ vows.describe('log4js fileAppender').addBatch({ } }, requires: { - '../streams': { + 'streamroller': { RollingFileStream: function(filename) { openedFiles.push(filename); @@ -82,12 +85,15 @@ vows.describe('log4js fileAppender').addBatch({ 'with default fileAppender settings': { topic: function() { var that = this - , testFile = path.join(__dirname, '/fa-default-test.log') + , testFile = path.join(__dirname, 'fa-default-test.log') , logger = log4js.getLogger('default-settings'); remove(testFile); log4js.clearAppenders(); - log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings'); + log4js.addAppender( + require('../../lib/appenders/file').appender(testFile), + 'default-settings' + ); logger.info("This should be in the file."); @@ -114,10 +120,10 @@ vows.describe('log4js fileAppender').addBatch({ function addAppender(cat) { var testFile = path.join( __dirname, - '/fa-subcategories-test-'+cat.join('-').replace(/\./g, "_")+'.log' + 'fa-subcategories-test-'+cat.join('-').replace(/\./g, "_")+'.log' ); remove(testFile); - log4js.addAppender(require('../lib/appenders/file').appender(testFile), cat); + log4js.addAppender(require('../../lib/appenders/file').appender(testFile), cat); return testFile; } @@ -198,7 +204,7 @@ vows.describe('log4js fileAppender').addBatch({ //log file of 100 bytes 
maximum, no backups log4js.clearAppenders(); log4js.addAppender( - require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0), + require('../../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0), 'max-file-size' ); logger.info("This is the first log message."); @@ -237,7 +243,7 @@ vows.describe('log4js fileAppender').addBatch({ //log file of 50 bytes maximum, 2 backups log4js.clearAppenders(); log4js.addAppender( - require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2), + require('../../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2), 'max-file-size-backups' ); logger.info("This is the first log message."); @@ -310,7 +316,7 @@ vows.describe('log4js fileAppender').addBatch({ //log file of 50 bytes maximum, 2 backups log4js.clearAppenders(); log4js.addAppender( - require('../lib/appenders/file').appender( + require('../../lib/appenders/file').appender( testFile, log4js.layouts.basicLayout, 50, 2, true ), 'max-file-size-backups' @@ -380,11 +386,11 @@ vows.describe('log4js fileAppender').addBatch({ 'configure' : { 'with fileAppender': { topic: function() { - var log4js = require('../lib/log4js') + var log4js = require('../../lib/log4js') , logger; //this config file defines one file appender (to ./tmp-tests.log) //and sets the log level for "tests" to WARN - log4js.configure('./test/log4js.json'); + log4js.configure('./test/vows/log4js.json'); logger = log4js.getLogger('tests'); logger.info('this should not be written to the file'); logger.warn('this should be written to the file'); @@ -403,7 +409,7 @@ vows.describe('log4js fileAppender').addBatch({ var consoleArgs , errorHandler , fileAppender = sandbox.require( - '../lib/appenders/file', + '../../lib/appenders/file', { globals: { console: { @@ -413,7 +419,7 @@ vows.describe('log4js fileAppender').addBatch({ } }, requires: { - '../streams': { + 'streamroller': { RollingFileStream: function(filename) { this.end = function() {}; diff --git a/test/fileSyncAppender-test.js b/test/vows/fileSyncAppender-test.js similarity index 92% rename from test/fileSyncAppender-test.js rename to test/vows/fileSyncAppender-test.js index d6e2b29..1a32240 100755 --- a/test/fileSyncAppender-test.js +++ b/test/vows/fileSyncAppender-test.js @@ -3,7 +3,7 @@ var vows = require('vows') , fs = require('fs') , path = require('path') , sandbox = require('sandboxed-module') -, log4js = require('../lib/log4js') +, log4js = require('../../lib/log4js') , assert = require('assert') , EOL = require('os').EOL || '\n'; @@ -27,7 +27,7 @@ vows.describe('log4js fileSyncAppender').addBatch({ log4js.clearAppenders(); log4js.addAppender( - require('../lib/appenders/fileSync').appender(testFile), + require('../../lib/appenders/fileSync').appender(testFile), 'default-settings' ); @@ -55,7 +55,14 @@ vows.describe('log4js fileSyncAppender').addBatch({ //log file of 100 bytes maximum, no backups log4js.clearAppenders(); log4js.addAppender( - require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 100, 0), + require( + '../../lib/appenders/fileSync' + ).appender( + testFile, + log4js.layouts.basicLayout, + 100, + 0 + ), 'max-file-size' ); logger.info("This is the first log message."); @@ -92,7 +99,12 @@ vows.describe('log4js fileSyncAppender').addBatch({ //log file of 50 bytes maximum, 2 backups log4js.clearAppenders(); log4js.addAppender( - require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 50, 2), + 
require('../../lib/appenders/fileSync').appender( + testFile, + log4js.layouts.basicLayout, + 50, + 2 + ), 'max-file-size-backups' ); logger.info("This is the first log message."); @@ -156,7 +168,7 @@ vows.describe('log4js fileSyncAppender').addBatch({ 'configure' : { 'with fileSyncAppender': { topic: function() { - var log4js = require('../lib/log4js') + var log4js = require('../../lib/log4js') , logger; //this config defines one file appender (to ./tmp-sync-tests.log) //and sets the log level for "tests" to WARN diff --git a/test/gelfAppender-test.js b/test/vows/gelfAppender-test.js similarity index 97% rename from test/gelfAppender-test.js rename to test/vows/gelfAppender-test.js index 76fb5ea..0ee79bb 100644 --- a/test/gelfAppender-test.js +++ b/test/vows/gelfAppender-test.js @@ -2,8 +2,8 @@ var vows = require('vows') , assert = require('assert') , sandbox = require('sandboxed-module') -, log4js = require('../lib/log4js') -, realLayouts = require('../lib/layouts') +, log4js = require('../../lib/log4js') +, realLayouts = require('../../lib/layouts') , setupLogging = function(options, category, compressedLength) { var fakeDgram = { sent: false, @@ -56,7 +56,7 @@ var vows = require('vows') }, messagePassThroughLayout: realLayouts.messagePassThroughLayout } - , appender = sandbox.require('../lib/appenders/gelf', { + , appender = sandbox.require('../../lib/appenders/gelf', { requires: { dgram: fakeDgram, zlib: fakeZlib, diff --git a/test/global-log-level-test.js b/test/vows/global-log-level-test.js similarity index 98% rename from test/global-log-level-test.js rename to test/vows/global-log-level-test.js index 4ccc583..b432ca1 100644 --- a/test/global-log-level-test.js +++ b/test/vows/global-log-level-test.js @@ -5,7 +5,7 @@ var vows = require('vows') vows.describe('log4js global loglevel').addBatch({ 'global loglevel' : { topic: function() { - var log4js = require('../lib/log4js'); + var log4js = require('../../lib/log4js'); return log4js; }, diff --git a/test/hipchatAppender-test.js b/test/vows/hipchatAppender-test.js similarity index 97% rename from test/hipchatAppender-test.js rename to test/vows/hipchatAppender-test.js index 4769c3a..a514caa 100644 --- a/test/hipchatAppender-test.js +++ b/test/vows/hipchatAppender-test.js @@ -1,7 +1,7 @@ "use strict"; var vows = require('vows'), assert = require('assert'), - log4js = require('../lib/log4js'), + log4js = require('../../lib/log4js'), sandbox = require('sandboxed-module'); function setupLogging(category, options) { @@ -34,7 +34,7 @@ function setupLogging(category, options) { } }; - var hipchatModule = sandbox.require('../lib/appenders/hipchat', { + var hipchatModule = sandbox.require('../../lib/appenders/hipchat', { requires: { 'hipchat-notifier': fakeHipchatNotifier } diff --git a/test/layouts-test.js b/test/vows/layouts-test.js similarity index 92% rename from test/layouts-test.js rename to test/vows/layouts-test.js index 1b7d2ef..7a7a606 100644 --- a/test/layouts-test.js +++ b/test/vows/layouts-test.js @@ -17,7 +17,7 @@ function test(args, pattern, value) { vows.describe('log4js layouts').addBatch({ 'colouredLayout': { topic: function() { - return require('../lib/layouts').colouredLayout; + return require('../../lib/layouts').colouredLayout; }, 'should apply level colour codes to output': function(layout) { @@ -46,7 +46,7 @@ vows.describe('log4js layouts').addBatch({ 'messagePassThroughLayout': { topic: function() { - return require('../lib/layouts').messagePassThroughLayout; + return 
require('../../lib/layouts').messagePassThroughLayout; }, 'should take a logevent and output only the message' : function(layout) { assert.equal(layout({ @@ -82,16 +82,24 @@ vows.describe('log4js layouts').addBatch({ }), "{ thing: 1 }"); }, 'should print the stacks of a passed error objects': function(layout) { - assert.isArray(layout({ - data: [ new Error() ], - startTime: new Date(2010, 11, 5, 14, 18, 30, 45), - categoryName: "cheese", - level: { - colour: "green", - toString: function() { return "ERROR"; } - } - }).match(/Error\s+at Object\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s+at runTest/) - , 'regexp did not return a match'); + assert.isArray( + layout({ + data: [ new Error() ], + startTime: new Date(2010, 11, 5, 14, 18, 30, 45), + categoryName: "cheese", + level: { + colour: "green", + toString: function() { return "ERROR"; } + } + }).match( + new RegExp('' + + /Error\s+at Object\..*\s+/.source + + /\((.*)test[\\\/]vows[\\\/]layouts-test\.js/.source + + /\:\d+\:\d+\)\s+at runTest/.source + ) + ), + 'regexp did not return a match' + ); }, 'with passed augmented errors': { topic: function(layout){ @@ -127,7 +135,7 @@ vows.describe('log4js layouts').addBatch({ 'basicLayout': { topic: function() { - var layout = require('../lib/layouts').basicLayout, + var layout = require('../../lib/layouts').basicLayout, event = { data: ['this is a test'], startTime: new Date(2010, 11, 5, 14, 18, 30, 45), @@ -196,7 +204,7 @@ vows.describe('log4js layouts').addBatch({ level: { toString: function() { return "DEBUG"; } } - }, layout = require('../lib/layouts').patternLayout + }, layout = require('../../lib/layouts').patternLayout , tokens = { testString: 'testStringToken', testFunction: function() { return 'testFunctionToken'; }, @@ -304,7 +312,7 @@ vows.describe('log4js layouts').addBatch({ } }, 'layout makers': { - topic: require('../lib/layouts'), + topic: require('../../lib/layouts'), 'should have a maker for each layout': function(layouts) { assert.ok(layouts.layout("messagePassThrough")); assert.ok(layouts.layout("basic")); @@ -314,7 +322,7 @@ vows.describe('log4js layouts').addBatch({ } }, 'add layout': { - topic: require('../lib/layouts'), + topic: require('../../lib/layouts'), 'should be able to add a layout': function(layouts) { layouts.addLayout('test_layout', function(config){ assert.equal(config, 'test_config'); diff --git a/test/levels-test.js b/test/vows/levels-test.js similarity index 99% rename from test/levels-test.js rename to test/vows/levels-test.js index df655fd..0933076 100644 --- a/test/levels-test.js +++ b/test/vows/levels-test.js @@ -1,7 +1,7 @@ "use strict"; var vows = require('vows') , assert = require('assert') -, levels = require('../lib/levels'); +, levels = require('../../lib/levels'); function assertThat(level) { function assertForEach(assertion, test, otherLevels) { diff --git a/test/log-abspath-test.js b/test/vows/log-abspath-test.js similarity index 89% rename from test/log-abspath-test.js rename to test/vows/log-abspath-test.js index 5bb64d3..db45a0a 100644 --- a/test/log-abspath-test.js +++ b/test/vows/log-abspath-test.js @@ -9,7 +9,7 @@ vows.describe('log4js-abspath').addBatch({ topic: function() { var appenderOptions, log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { './appenders/fake': { name: "fake", @@ -30,7 +30,7 @@ vows.describe('log4js-abspath').addBatch({ } ] }; - + log4js.configure(config, { cwd: '/absolute/path/to' }); @@ -45,10 +45,10 @@ vows.describe('log4js-abspath').addBatch({ topic: function() { var 
fileOpened, fileAppender = sandbox.require( - '../lib/appenders/file', + '../../lib/appenders/file', { requires: - { '../streams': - { RollingFileStream: + { 'streamroller': + { RollingFileStream: function(file) { fileOpened = file; return { @@ -61,10 +61,10 @@ vows.describe('log4js-abspath').addBatch({ } ); fileAppender.configure( - { - filename: "whatever.log", - maxLogSize: 10 - }, + { + filename: "whatever.log", + maxLogSize: 10 + }, { cwd: '/absolute/path/to' } ); return fileOpened; diff --git a/test/log4js.json b/test/vows/log4js.json similarity index 100% rename from test/log4js.json rename to test/vows/log4js.json diff --git a/test/logFacesAppender-test.js b/test/vows/logFacesAppender-test.js similarity index 95% rename from test/logFacesAppender-test.js rename to test/vows/logFacesAppender-test.js index 3c2d62c..cbe6bd1 100644 --- a/test/logFacesAppender-test.js +++ b/test/vows/logFacesAppender-test.js @@ -1,7 +1,7 @@ "use strict"; var vows = require('vows'), assert = require('assert'), - log4js = require('../lib/log4js'), + log4js = require('../../lib/log4js'), sandbox = require('sandboxed-module'); function setupLogging(category, options) { @@ -23,7 +23,7 @@ function setupLogging(category, options) { } }; - var lfsModule = sandbox.require('../lib/appenders/logFacesAppender', { + var lfsModule = sandbox.require('../../lib/appenders/logFacesAppender', { requires: { 'dgram': fakeDgram } diff --git a/test/logLevelFilter-test.js b/test/vows/logLevelFilter-test.js similarity index 92% rename from test/logLevelFilter-test.js rename to test/vows/logLevelFilter-test.js index b3deb05..18a9f0f 100644 --- a/test/logLevelFilter-test.js +++ b/test/vows/logLevelFilter-test.js @@ -16,10 +16,10 @@ function remove(filename) { vows.describe('log4js logLevelFilter').addBatch({ 'appender': { topic: function() { - var log4js = require('../lib/log4js'), logEvents = [], logger; + var log4js = require('../../lib/log4js'), logEvents = [], logger; log4js.clearAppenders(); log4js.addAppender( - require('../lib/appenders/logLevelFilter') + require('../../lib/appenders/logLevelFilter') .appender( 'ERROR', undefined, @@ -44,14 +44,14 @@ vows.describe('log4js logLevelFilter').addBatch({ 'configure': { topic: function() { - var log4js = require('../lib/log4js') + var log4js = require('../../lib/log4js') , logger; remove(__dirname + '/logLevelFilter.log'); remove(__dirname + '/logLevelFilter-warnings.log'); remove(__dirname + '/logLevelFilter-debugs.log'); - log4js.configure('test/with-logLevelFilter.json'); + log4js.configure('test/vows/with-logLevelFilter.json'); logger = log4js.getLogger("tests"); logger.debug('debug'); logger.info('info'); diff --git a/test/logger-test.js b/test/vows/logger-test.js similarity index 94% rename from test/logger-test.js rename to test/vows/logger-test.js index 0bd29e1..976cb47 100644 --- a/test/logger-test.js +++ b/test/vows/logger-test.js @@ -1,11 +1,11 @@ "use strict"; var vows = require('vows') , assert = require('assert') -, levels = require('../lib/levels') -, loggerModule = require('../lib/logger') +, levels = require('../../lib/levels') +, loggerModule = require('../../lib/logger') , Logger = loggerModule.Logger; -vows.describe('../lib/logger').addBatch({ +vows.describe('../../lib/logger').addBatch({ 'constructor with no parameters': { topic: new Logger(), 'should use default category': function(logger) { diff --git a/test/logging-test.js b/test/vows/logging-test.js similarity index 95% rename from test/logging-test.js rename to test/vows/logging-test.js index 
2d71d42..b1731f3 100644 --- a/test/logging-test.js +++ b/test/vows/logging-test.js @@ -15,7 +15,7 @@ function setupConsoleTest() { }); log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { globals: { console: fakeConsole @@ -35,7 +35,7 @@ vows.describe('log4js').addBatch({ 'getBufferedLogger': { topic: function () { - var log4js = require('../lib/log4js'); + var log4js = require('../../lib/log4js'); log4js.clearAppenders(); var logger = log4js.getBufferedLogger('tests'); return logger; @@ -54,7 +54,7 @@ vows.describe('log4js').addBatch({ 'cache events': { topic: function () { - var log4js = require('../lib/log4js'); + var log4js = require('../../lib/log4js'); log4js.clearAppenders(); var logger = log4js.getBufferedLogger('tests1'); var events = []; @@ -78,7 +78,7 @@ vows.describe('log4js').addBatch({ 'log events after flush() is called': { topic: function () { - var log4js = require('../lib/log4js'); + var log4js = require('../../lib/log4js'); log4js.clearAppenders(); var logger = log4js.getBufferedLogger('tests2'); logger.target.setLevel("TRACE"); @@ -106,7 +106,7 @@ vows.describe('log4js').addBatch({ 'getLogger': { topic: function() { - var log4js = require('../lib/log4js'); + var log4js = require('../../lib/log4js'); log4js.clearAppenders(); var logger = log4js.getLogger('tests'); logger.setLevel("DEBUG"); @@ -162,7 +162,7 @@ vows.describe('log4js').addBatch({ shutdownCallbackCalled: false }, log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { './appenders/file': @@ -194,7 +194,7 @@ vows.describe('log4js').addBatch({ events.shutdownCallbackCalled = true; // Re-enable log writing so other tests that use logger are not // affected. - require('../lib/logger').enableAllLogWrites(); + require('../../lib/logger').enableAllLogWrites(); callback(null, events); }); }, @@ -220,7 +220,7 @@ vows.describe('log4js').addBatch({ topic: function() { var appenderConfig, log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { './appenders/file': @@ -254,7 +254,7 @@ vows.describe('log4js').addBatch({ 'configuration that causes an error': { topic: function() { var log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { './appenders/file': @@ -292,7 +292,7 @@ vows.describe('log4js').addBatch({ var appenderConfig, configFilename, log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { 'fs': { statSync: @@ -353,24 +353,24 @@ vows.describe('log4js').addBatch({ } }, log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { - './appenders/console': fakeConsoleAppender + './appenders/stdout': fakeConsoleAppender } } ); logger = log4js.getLogger("some-logger"); logger.debug("This is a test"); }, - 'should default to the console appender': function(evt) { + 'should default to the stdout appender': function(evt) { assert.equal(evt.data[0], "This is a test"); } }, 'addAppender' : { topic: function() { - var log4js = require('../lib/log4js'); + var log4js = require('../../lib/log4js'); log4js.clearAppenders(); return log4js; }, @@ -487,10 +487,10 @@ vows.describe('log4js').addBatch({ log: function() { } }, log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { - './appenders/console': fakeConsole + './appenders/stdout': fakeConsole }, globals: { console: globalConsole @@ -505,7 +505,7 @@ vows.describe('log4js').addBatch({ return appenderEvents; }, - 'should configure a console appender': function(appenderEvents) { + 'should configure a stdout appender': 
function(appenderEvents) { assert.equal(appenderEvents[0].data[0], 'this is a test'); }, @@ -607,13 +607,13 @@ vows.describe('log4js').addBatch({ 'configuration persistence' : { topic: function() { var logEvent, - firstLog4js = require('../lib/log4js'), + firstLog4js = require('../../lib/log4js'), secondLog4js; firstLog4js.clearAppenders(); firstLog4js.addAppender(function(evt) { logEvent = evt; }); - secondLog4js = require('../lib/log4js'); + secondLog4js = require('../../lib/log4js'); secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js"); return logEvent; @@ -625,7 +625,7 @@ vows.describe('log4js').addBatch({ 'getDefaultLogger': { topic: function() { - return require('../lib/log4js').getDefaultLogger(); + return require('../../lib/log4js').getDefaultLogger(); }, 'should return a logger': function(logger) { assert.ok(logger.info); diff --git a/test/logglyAppender-test.js b/test/vows/logglyAppender-test.js similarity index 96% rename from test/logglyAppender-test.js rename to test/vows/logglyAppender-test.js index 688e43e..d5dc3c4 100644 --- a/test/logglyAppender-test.js +++ b/test/vows/logglyAppender-test.js @@ -1,7 +1,7 @@ "use strict"; var vows = require('vows') , assert = require('assert') - , log4js = require('../lib/log4js') + , log4js = require('../../lib/log4js') , sandbox = require('sandboxed-module') ; @@ -39,7 +39,7 @@ function setupLogging(category, options) { } }; - var logglyModule = sandbox.require('../lib/appenders/loggly', { + var logglyModule = sandbox.require('../../lib/appenders/loggly', { requires: { 'loggly': fakeLoggly, '../layouts': fakeLayouts diff --git a/test/logstashUDP-test.js b/test/vows/logstashUDP-test.js similarity index 96% rename from test/logstashUDP-test.js rename to test/vows/logstashUDP-test.js index 25d356c..8d5cf40 100644 --- a/test/logstashUDP-test.js +++ b/test/vows/logstashUDP-test.js @@ -1,7 +1,7 @@ "use strict"; var vows = require('vows') , assert = require('assert') -, log4js = require('../lib/log4js') +, log4js = require('../../lib/log4js') , sandbox = require('sandboxed-module') ; @@ -24,7 +24,7 @@ function setupLogging(category, options) { } }; - var logstashModule = sandbox.require('../lib/appenders/logstashUDP', { + var logstashModule = sandbox.require('../../lib/appenders/logstashUDP', { requires: { 'dgram': fakeDgram } diff --git a/test/mailgunAppender-test.js b/test/vows/mailgunAppender-test.js similarity index 98% rename from test/mailgunAppender-test.js rename to test/vows/mailgunAppender-test.js index fa3842d..261fb1e 100644 --- a/test/mailgunAppender-test.js +++ b/test/vows/mailgunAppender-test.js @@ -1,7 +1,7 @@ "use strict"; var vows = require('vows'); var assert = require('assert'); -var log4js = require('../lib/log4js'); +var log4js = require('../../lib/log4js'); var sandbox = require('sandboxed-module'); function setupLogging(category, options) { @@ -48,7 +48,7 @@ function setupLogging(category, options) { }; - var mailgunModule = sandbox.require('../lib/appenders/mailgun', { + var mailgunModule = sandbox.require('../../lib/appenders/mailgun', { requires: { 'mailgun-js': fakeMailgun, '../layouts': fakeLayouts diff --git a/test/multiprocess-test.js b/test/vows/multiprocess-test.js similarity index 97% rename from test/multiprocess-test.js rename to test/vows/multiprocess-test.js index d193e83..2e8bffb 100644 --- a/test/multiprocess-test.js +++ b/test/vows/multiprocess-test.js @@ -59,7 +59,7 @@ vows.describe('Multiprocess Appender').addBatch({ topic: function() { var fakeNet = makeFakeNet(), 
appender = sandbox.require( - '../lib/appenders/multiprocess', + '../../lib/appenders/multiprocess', { requires: { 'net': fakeNet @@ -118,7 +118,7 @@ vows.describe('Multiprocess Appender').addBatch({ topic: function() { var fakeNet = makeFakeNet(), appender = sandbox.require( - '../lib/appenders/multiprocess', + '../../lib/appenders/multiprocess', { requires: { 'net': fakeNet @@ -153,7 +153,7 @@ vows.describe('Multiprocess Appender').addBatch({ topic: function() { var fakeNet = makeFakeNet(), appender = sandbox.require( - '../lib/appenders/multiprocess', + '../../lib/appenders/multiprocess', { requires: { 'net': fakeNet @@ -172,7 +172,7 @@ vows.describe('Multiprocess Appender').addBatch({ topic: function() { var fakeNet = makeFakeNet(), appender = sandbox.require( - '../lib/appenders/multiprocess', + '../../lib/appenders/multiprocess', { requires: { 'net': fakeNet @@ -252,7 +252,7 @@ vows.describe('Multiprocess Appender').addBatch({ topic: function() { var fakeNet = makeFakeNet(), appender = sandbox.require( - '../lib/appenders/multiprocess', + '../../lib/appenders/multiprocess', { requires: { 'net': fakeNet @@ -273,7 +273,7 @@ vows.describe('Multiprocess Appender').addBatch({ var results = {} , fakeNet = makeFakeNet() , appender = sandbox.require( - '../lib/appenders/multiprocess', + '../../lib/appenders/multiprocess', { requires: { 'net': fakeNet, diff --git a/test/newLevel-test.js b/test/vows/newLevel-test.js similarity index 95% rename from test/newLevel-test.js rename to test/vows/newLevel-test.js index 72dece9..c0c2487 100644 --- a/test/newLevel-test.js +++ b/test/vows/newLevel-test.js @@ -1,12 +1,12 @@ "use strict"; var vows = require('vows') , assert = require('assert') - , Level = require('../lib/levels') - , log4js = require('../lib/log4js') - , loggerModule = require('../lib/logger') + , Level = require('../../lib/levels') + , log4js = require('../../lib/log4js') + , loggerModule = require('../../lib/logger') , Logger = loggerModule.Logger; -vows.describe('../lib/logger').addBatch({ +vows.describe('../../lib/logger').addBatch({ 'creating a new log level': { topic: function () { Level.forName("DIAG", 6000); diff --git a/test/nolog-test.js b/test/vows/nolog-test.js similarity index 99% rename from test/nolog-test.js rename to test/vows/nolog-test.js index 80c3c18..04776bf 100644 --- a/test/nolog-test.js +++ b/test/vows/nolog-test.js @@ -3,7 +3,7 @@ var vows = require('vows') , assert = require('assert') , util = require('util') , EE = require('events').EventEmitter -, levels = require('../lib/levels'); +, levels = require('../../lib/levels'); function MockLogger() { @@ -45,7 +45,7 @@ util.inherits(MockResponse, EE); vows.describe('log4js connect logger').addBatch({ 'getConnectLoggerModule': { topic: function() { - var clm = require('../lib/connect-logger'); + var clm = require('../../lib/connect-logger'); return clm; }, diff --git a/test/reloadConfiguration-test.js b/test/vows/reloadConfiguration-test.js similarity index 96% rename from test/reloadConfiguration-test.js rename to test/vows/reloadConfiguration-test.js index 060f089..781f577 100644 --- a/test/reloadConfiguration-test.js +++ b/test/vows/reloadConfiguration-test.js @@ -15,7 +15,7 @@ function setupConsoleTest() { }); log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { globals: { console: fakeConsole @@ -75,7 +75,7 @@ vows.describe('reload configuration').addBatch({ setIntervalCallback = cb; }, log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { 'fs': fakeFS, @@ -113,7 +113,7 
@@ vows.describe('reload configuration').addBatch({ fileRead = 0, logEvents = [], logger, - modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'), + modulePath = require('path').normalize(__dirname + '/../../lib/log4js.json'), mtime = new Date(), fakeFS = { config: { @@ -152,7 +152,7 @@ vows.describe('reload configuration').addBatch({ setIntervalCallback = cb; }, log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { 'fs': fakeFS, @@ -193,7 +193,7 @@ vows.describe('reload configuration').addBatch({ fileRead = 0, logEvents = [], logger, - modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'), + modulePath = require('path').normalize(__dirname + '/../../lib/log4js.json'), mtime = new Date(), fakeFS = { config: { @@ -230,7 +230,7 @@ vows.describe('reload configuration').addBatch({ setIntervalCallback = cb; }, log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { 'fs': fakeFS, @@ -284,7 +284,7 @@ vows.describe('reload configuration').addBatch({ 'when called twice with reload options': { topic: function() { - var modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'), + var modulePath = require('path').normalize(__dirname + '/../../lib/log4js.json'), fakeFS = { readFileSync: function (file, encoding) { return JSON.stringify({}); @@ -310,7 +310,7 @@ vows.describe('reload configuration').addBatch({ return 1234; }, log4js = sandbox.require( - '../lib/log4js', + '../../lib/log4js', { requires: { 'fs': fakeFS, diff --git a/test/setLevel-asymmetry-test.js b/test/vows/setLevel-asymmetry-test.js similarity index 98% rename from test/setLevel-asymmetry-test.js rename to test/vows/setLevel-asymmetry-test.js index 95ba84b..149a929 100644 --- a/test/setLevel-asymmetry-test.js +++ b/test/vows/setLevel-asymmetry-test.js @@ -10,7 +10,7 @@ // Basic set up var vows = require('vows'); var assert = require('assert'); -var log4js = require('../lib/log4js'); +var log4js = require('../../lib/log4js'); var logger = log4js.getLogger('test-setLevel-asymmetry'); // uncomment one or other of the following to see progress (or not) while running the tests diff --git a/test/slackAppender-test.js b/test/vows/slackAppender-test.js similarity index 97% rename from test/slackAppender-test.js rename to test/vows/slackAppender-test.js index 366bfcd..a49ab78 100644 --- a/test/slackAppender-test.js +++ b/test/vows/slackAppender-test.js @@ -1,7 +1,7 @@ "use strict"; var vows = require('vows'); var assert = require('assert'); -var log4js = require('../lib/log4js'); +var log4js = require('../../lib/log4js'); var sandbox = require('sandboxed-module'); function setupLogging(category, options) { @@ -51,7 +51,7 @@ function setupLogging(category, options) { }; - var slackModule = sandbox.require('../lib/appenders/slack', { + var slackModule = sandbox.require('../../lib/appenders/slack', { requires: { 'slack-node': fakeSlack, '../layouts': fakeLayouts diff --git a/test/smtpAppender-test.js b/test/vows/smtpAppender-test.js similarity index 98% rename from test/smtpAppender-test.js rename to test/vows/smtpAppender-test.js index 5ebda98..af5ccd9 100644 --- a/test/smtpAppender-test.js +++ b/test/vows/smtpAppender-test.js @@ -1,7 +1,7 @@ "use strict"; var vows = require('vows'); var assert = require('assert'); -var log4js = require('../lib/log4js'); +var log4js = require('../../lib/log4js'); var sandbox = require('sandboxed-module'); function setupLogging(category, options) { @@ -41,7 +41,7 @@ function setupLogging(category, options) { 
var fakeTransportPlugin = function () { }; - var smtpModule = sandbox.require('../lib/appenders/smtp', { + var smtpModule = sandbox.require('../../lib/appenders/smtp', { requires: { 'nodemailer': fakeMailer, 'nodemailer-sendmail-transport': fakeTransportPlugin, diff --git a/test/subcategories-test.js b/test/vows/subcategories-test.js similarity index 97% rename from test/subcategories-test.js rename to test/vows/subcategories-test.js index 8570f0e..f34c36b 100644 --- a/test/subcategories-test.js +++ b/test/vows/subcategories-test.js @@ -2,8 +2,8 @@ var assert = require('assert') , vows = require('vows') , sandbox = require('sandboxed-module') -, log4js = require('../lib/log4js') -, levels = require('../lib/levels'); +, log4js = require('../../lib/log4js') +, levels = require('../../lib/levels'); vows.describe('subcategories').addBatch({ 'loggers created after levels configuration is loaded': { diff --git a/test/vows/with-categoryFilter.json b/test/vows/with-categoryFilter.json new file mode 100644 index 0000000..5cde0c6 --- /dev/null +++ b/test/vows/with-categoryFilter.json @@ -0,0 +1,23 @@ +{ + "appenders": [ + { + "type": "categoryFilter", + "exclude": "web", + "appender": { + "type": "file", + "filename": "test/vows/categoryFilter-noweb.log", + "layout": { + "type": "messagePassThrough" + } + } + }, + { + "category": "web", + "type": "file", + "filename": "test/vows/categoryFilter-web.log", + "layout": { + "type": "messagePassThrough" + } + } + ] +} diff --git a/test/vows/with-dateFile.json b/test/vows/with-dateFile.json new file mode 100644 index 0000000..4cc4381 --- /dev/null +++ b/test/vows/with-dateFile.json @@ -0,0 +1,17 @@ +{ + "appenders": [ + { + "category": "tests", + "type": "dateFile", + "filename": "test/vows/date-file-test.log", + "pattern": "-from-MM-dd", + "layout": { + "type": "messagePassThrough" + } + } + ], + + "levels": { + "tests": "WARN" + } +} diff --git a/test/with-log-rolling.json b/test/vows/with-log-rolling.json similarity index 100% rename from test/with-log-rolling.json rename to test/vows/with-log-rolling.json diff --git a/test/with-logLevelFilter.json b/test/vows/with-logLevelFilter.json similarity index 55% rename from test/with-logLevelFilter.json rename to test/vows/with-logLevelFilter.json index 7bcd8ad..c80367d 100644 --- a/test/with-logLevelFilter.json +++ b/test/vows/with-logLevelFilter.json @@ -1,15 +1,15 @@ { "appenders": [ - { - "category": "tests", + { + "category": "tests", "type": "logLevelFilter", "level": "WARN", "appender": { "type": "file", - "filename": "test/logLevelFilter-warnings.log", - "layout": { - "type": "messagePassThrough" - } + "filename": "test/vows/logLevelFilter-warnings.log", + "layout": { + "type": "messagePassThrough" + } } }, { @@ -19,22 +19,22 @@ "maxLevel": "DEBUG", "appender": { "type": "file", - "filename": "test/logLevelFilter-debugs.log", + "filename": "test/vows/logLevelFilter-debugs.log", "layout": { "type": "messagePassThrough" } } }, - { - "category": "tests", + { + "category": "tests", "type": "file", - "filename": "test/logLevelFilter.log", - "layout": { - "type": "messagePassThrough" - } + "filename": "test/vows/logLevelFilter.log", + "layout": { + "type": "messagePassThrough" + } } ], - + "levels": { "tests": "TRACE" } diff --git a/test/with-categoryFilter.json b/test/with-categoryFilter.json deleted file mode 100644 index 7998cc8..0000000 --- a/test/with-categoryFilter.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "appenders": [ - { - "type": "categoryFilter", - "exclude": "web", - "appender": { - "type": 
"file", - "filename": "test/categoryFilter-noweb.log", - "layout": { - "type": "messagePassThrough" - } - } - }, - { - "category": "web", - "type": "file", - "filename": "test/categoryFilter-web.log", - "layout": { - "type": "messagePassThrough" - } - } - ] -} diff --git a/test/with-dateFile.json b/test/with-dateFile.json deleted file mode 100644 index 1872743..0000000 --- a/test/with-dateFile.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "appenders": [ - { - "category": "tests", - "type": "dateFile", - "filename": "test/date-file-test.log", - "pattern": "-from-MM-dd", - "layout": { - "type": "messagePassThrough" - } - } - ], - - "levels": { - "tests": "WARN" - } -} From 301dd5002366d44bf260e21324e7c1fffb873bd0 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sun, 23 Oct 2016 11:53:48 +1100 Subject: [PATCH 08/24] premature removal of semver --- package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/package.json b/package.json index c061217..8500c43 100644 --- a/package.json +++ b/package.json @@ -31,6 +31,7 @@ }, "dependencies": { "debug": "^2.2.0", + "semver": "^5.3.0", "streamroller": "^0.1.0" }, "devDependencies": { From dc831b8a99593c417057e8cd877cb7b9bbab4138 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sun, 23 Oct 2016 12:02:16 +1100 Subject: [PATCH 09/24] updated streamroller, because 0.1.0 was broken --- package.json | 2 +- test/vows/logging-test.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 8500c43..7f89f6c 100644 --- a/package.json +++ b/package.json @@ -32,7 +32,7 @@ "dependencies": { "debug": "^2.2.0", "semver": "^5.3.0", - "streamroller": "^0.1.0" + "streamroller": "^0.1.1" }, "devDependencies": { "jshint": "^2.9.2", diff --git a/test/vows/logging-test.js b/test/vows/logging-test.js index b1731f3..b3f9309 100644 --- a/test/vows/logging-test.js +++ b/test/vows/logging-test.js @@ -473,7 +473,7 @@ vows.describe('log4js').addBatch({ topic: function() { var appenderEvents = [], fakeConsole = { - 'name': 'console', + 'name': 'stdout', 'appender': function () { return function(evt) { appenderEvents.push(evt); From bfdc796321f08bc1eef1895ef19a25520bad1b5d Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 24 Oct 2016 08:09:47 +1100 Subject: [PATCH 10/24] mentioned changes in 1.0 --- README.md | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index eaf9851..5c865fd 100644 --- a/README.md +++ b/README.md @@ -1,18 +1,17 @@ # log4js-node [![Build Status](https://secure.travis-ci.org/nomiddlename/log4js-node.png?branch=master)](http://travis-ci.org/nomiddlename/log4js-node) [![NPM](https://nodei.co/npm/log4js.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/log4js/) - + This is a conversion of the [log4js](https://github.com/stritti/log4js) -framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript. +framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript. 
Out of the box it supports the following features: * coloured console logging to stdout or stderr * replacement of node's console.log functions (optional) -* file appender, with log rolling based on file size +* file appender, with configurable log rolling based on file size or date * SMTP appender * GELF appender -* hook.io appender * Loggly appender * Logstash UDP appender * logFaces appender @@ -21,6 +20,12 @@ Out of the box it supports the following features: * configurable log message layout/patterns * different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.) +## Important changes in 1.0 + +The default appender has been changed from `console` to `stdout` - this alleviates a memory problem that happens when logging using console. If you're using log4js in a browser (via browserify), then you'll probably need to explicitly configure log4js to use the console appender now (unless browserify handles process.stdout). + +I'm also trying to move away from `vows` for the tests, and use `tape` instead. New tests should be added to `test/tape`, not the vows ones. + NOTE: from log4js 0.5 onwards you'll need to explicitly enable replacement of node's console.log functions. Do this either by calling `log4js.replaceConsole()` or configuring with an object or json file like this: ```javascript @@ -51,7 +56,7 @@ By default, log4js outputs to stdout with the coloured layout (thanks to [masylu ``` See example.js for a full example, but here's a snippet (also in fromreadme.js): ```javascript -var log4js = require('log4js'); +var log4js = require('log4js'); //console log is loaded by default, so you won't normally need to do this //log4js.loadAppender('console'); log4js.loadAppender('file'); @@ -87,9 +92,9 @@ log4js.configure({ ## configuration You can configure the appenders and log levels manually (as above), or provide a -configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. The -configuration file location may also be specified via the environment variable -LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`). +configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. The +configuration file location may also be specified via the environment variable +LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`). An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`. You can configure log4js to check for configuration file changes at regular intervals, and if changed, reload. This allows changes to logging levels to occur without restarting the application. 
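For illustration only (not part of any patch in this series): a minimal sketch of how an application that still wants console output might configure it explicitly once the default appender moves to `stdout`, assuming the appenders-array `configure` format shown in the README above. The category name and log message below are arbitrary examples.

```javascript
// Minimal sketch: explicitly request the console appender now that stdout is the default.
// Assumes the pre-2.0 configure format (an array of appender definitions), the same shape
// used elsewhere in this series (e.g. test/vows/log4js.json, the deleted lib/log4js.json).
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: 'console' }
  ]
});

var logger = log4js.getLogger('browser-app'); // arbitrary category name
logger.info('this goes through the console appender rather than stdout');
```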
From a35b9a1b9c121f35c33570c7412d2538ddfafa4f Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Mon, 24 Oct 2016 08:35:42 +1100 Subject: [PATCH 11/24] not used any more --- lib/log4js.json | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 lib/log4js.json diff --git a/lib/log4js.json b/lib/log4js.json deleted file mode 100644 index 7b6d3e7..0000000 --- a/lib/log4js.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "appenders": [ - { - "type": "console" - } - ] -} \ No newline at end of file From 2162b0e8df75573596dd14bf4f976e0a0ebf9114 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Wed, 2 Nov 2016 08:08:01 +1100 Subject: [PATCH 12/24] removed support for node v0.10, bumped version of streamroller --- .travis.yml | 1 - package.json | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index a9eeacc..c886a6e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,4 +5,3 @@ node_js: - "5" - "4" - "0.12" - - "0.10" diff --git a/package.json b/package.json index 7f89f6c..2d5d968 100644 --- a/package.json +++ b/package.json @@ -19,7 +19,7 @@ "url": "http://github.com/nomiddlename/log4js-node/issues" }, "engines": { - "node": ">=0.8" + "node": ">=0.12" }, "scripts": { "pretest": "jshint lib/ test/", @@ -32,7 +32,7 @@ "dependencies": { "debug": "^2.2.0", "semver": "^5.3.0", - "streamroller": "^0.1.1" + "streamroller": "^0.2.1" }, "devDependencies": { "jshint": "^2.9.2", From 7f078161e47ad3d33d091c9f0906801d13fedd92 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Wed, 2 Nov 2016 08:08:28 +1100 Subject: [PATCH 13/24] fixed to use streamroller --- lib/appenders/file.js | 42 ++++++++++++++-------------------- test/vows/fileAppender-test.js | 18 ++++++++------- 2 files changed, 27 insertions(+), 33 deletions(-) diff --git a/lib/appenders/file.js b/lib/appenders/file.js index 1ec78dd..4e767ad 100644 --- a/lib/appenders/file.js +++ b/lib/appenders/file.js @@ -1,5 +1,6 @@ "use strict"; -var layouts = require('../layouts') +var debug = require('debug')('log4js:file') +, layouts = require('../layouts') , path = require('path') , fs = require('fs') , streams = require('streamroller') @@ -34,10 +35,10 @@ process.on('SIGHUP', function() { * if not provided then logs won't be rotated. * @param numBackups - the number of log files to keep after logSize * has been reached (default 5) - * @param compress - flag that controls log file compression + * @param options - options to be passed to the underlying stream * @param timezoneOffset - optional timezone offset in minutes (default system local) */ -function fileAppender (file, layout, logSize, numBackups, compress, timezoneOffset) { +function fileAppender (file, layout, logSize, numBackups, options, timezoneOffset) { var bytesWritten = 0; file = path.normalize(file); layout = layout || layouts.basicLayout; @@ -45,13 +46,14 @@ function fileAppender (file, layout, logSize, numBackups, compress, timezoneOffs //there has to be at least one backup if logSize has been specified numBackups = numBackups === 0 ? 
1 : numBackups; + debug("Creating file appender (", file, ", ", logSize, ", ", numBackups, ", ", options, ")"); var writer = { - stream: openTheStream(file, logSize, numBackups), + stream: openTheStream(file, logSize, numBackups, options), reopen: function() { this.stream.end(); - this.stream = openTheStream(file, logSize, numBackups); + this.stream = openTheStream(file, logSize, numBackups, options); } - } + }; // push file to the stack of open handlers openFiles.push(writer); @@ -62,23 +64,13 @@ function fileAppender (file, layout, logSize, numBackups, compress, timezoneOffs } -function openTheStream(file, fileSize, numFiles) { - var stream; - if (fileSize) { - stream = new streams.RollingFileStream( - file, - fileSize, - numFiles, - { "compress": compress } - ); - } else { - stream = fs.createWriteStream( - file, - { encoding: "utf8", - mode: parseInt('0644', 8), - flags: 'a' } - ); - } +function openTheStream(file, fileSize, numFiles, options) { + var stream = new streams.RollingFileStream( + file, + fileSize, + numFiles, + options + ); stream.on("error", function (err) { console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err); }); @@ -101,8 +93,8 @@ function configure(config, options) { layout, config.maxLogSize, config.backups, - config.compress, - config.timezoneOffset + config.timezoneOffset, + config ); } diff --git a/test/vows/fileAppender-test.js b/test/vows/fileAppender-test.js index e897b77..8f45c7e 100644 --- a/test/vows/fileAppender-test.js +++ b/test/vows/fileAppender-test.js @@ -26,7 +26,7 @@ vows.describe('log4js fileAppender').addBatch({ , count = 5, logfile; while (count--) { - logfile = path.join(__dirname, '/fa-default-test' + count + '.log'); + logfile = path.join(__dirname, 'fa-default-test' + count + '.log'); log4js.addAppender( require('../../lib/appenders/file').appender(logfile), 'default-settings' @@ -36,8 +36,8 @@ vows.describe('log4js fileAppender').addBatch({ return listenersCount; }, - 'does not add more than one `exit` listeners': function (initialCount) { - assert.ok(process.listeners('exit').length <= initialCount + 1); + 'does not add more than one `exit` listener': function (initialCount) { + assert.equal(initialCount + 1, process.listeners('exit').length); } }, @@ -51,7 +51,9 @@ vows.describe('log4js fileAppender').addBatch({ globals: { process: { on: function(evt, listener) { - exitListener = listener; + if (evt == 'exit') { + exitListener = listener; + } } } }, @@ -196,7 +198,7 @@ vows.describe('log4js fileAppender').addBatch({ }, 'with a max file size and no backups': { topic: function() { - var testFile = path.join(__dirname, '/fa-maxFileSize-test.log') + var testFile = path.join(__dirname, 'fa-maxFileSize-test.log') , logger = log4js.getLogger('max-file-size') , that = this; remove(testFile); @@ -234,7 +236,7 @@ vows.describe('log4js fileAppender').addBatch({ }, 'with a max file size and 2 backups': { topic: function() { - var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-test.log') + var testFile = path.join(__dirname, 'fa-maxFileSize-with-backups-test.log') , logger = log4js.getLogger('max-file-size-backups'); remove(testFile); remove(testFile+'.1'); @@ -307,7 +309,7 @@ vows.describe('log4js fileAppender').addBatch({ }, 'with a max file size and 2 compressed backups': { topic: function() { - var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-compressed-test.log') + var testFile = path.join(__dirname, 'fa-maxFileSize-with-backups-compressed-test.log') , logger = 
log4js.getLogger('max-file-size-backups'); remove(testFile); remove(testFile+'.1.gz'); @@ -317,7 +319,7 @@ vows.describe('log4js fileAppender').addBatch({ log4js.clearAppenders(); log4js.addAppender( require('../../lib/appenders/file').appender( - testFile, log4js.layouts.basicLayout, 50, 2, true + testFile, log4js.layouts.basicLayout, 50, 2, { compress: true } ), 'max-file-size-backups' ); From 281386a30c65cba37a111619bfa72935de878777 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Wed, 2 Nov 2016 08:46:34 +1100 Subject: [PATCH 14/24] added test for SIGHUP handler --- lib/appenders/file.js | 19 +++++++----------- test/tape/file-sighup-test.js | 37 +++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 12 deletions(-) create mode 100644 test/tape/file-sighup-test.js diff --git a/lib/appenders/file.js b/lib/appenders/file.js index 4e767ad..2893091 100644 --- a/lib/appenders/file.js +++ b/lib/appenders/file.js @@ -11,8 +11,9 @@ var debug = require('debug')('log4js:file') //close open files on process exit. process.on('exit', function() { + debug('Exit handler called.'); openFiles.forEach(function (file) { - file.stream.end(); + file.end(); }); }); @@ -20,8 +21,9 @@ process.on('exit', function() { // logrotate. Note that if you are using logrotate, you should not set // `logSize`. process.on('SIGHUP', function() { + debug('SIGHUP handler called.'); openFiles.forEach(function(writer) { - writer.reopen(); + writer.closeTheStream(writer.openTheStream.bind(writer)); }); }); @@ -39,7 +41,6 @@ process.on('SIGHUP', function() { * @param timezoneOffset - optional timezone offset in minutes (default system local) */ function fileAppender (file, layout, logSize, numBackups, options, timezoneOffset) { - var bytesWritten = 0; file = path.normalize(file); layout = layout || layouts.basicLayout; numBackups = numBackups === undefined ? 5 : numBackups; @@ -47,19 +48,13 @@ function fileAppender (file, layout, logSize, numBackups, options, timezoneOffse numBackups = numBackups === 0 ? 
1 : numBackups; debug("Creating file appender (", file, ", ", logSize, ", ", numBackups, ", ", options, ")"); - var writer = { - stream: openTheStream(file, logSize, numBackups, options), - reopen: function() { - this.stream.end(); - this.stream = openTheStream(file, logSize, numBackups, options); - } - }; + var writer = openTheStream(file, logSize, numBackups, options); // push file to the stack of open handlers openFiles.push(writer); return function(loggingEvent) { - writer.stream.write(layout(loggingEvent, timezoneOffset) + eol, "utf8"); + writer.write(layout(loggingEvent, timezoneOffset) + eol, "utf8"); }; } @@ -112,7 +107,7 @@ function shutdown(cb) { return cb(); } openFiles.forEach(function(file) { - var stream = file.stream; + var stream = file; if (!stream.write(eol, "utf-8")) { stream.once('drain', function() { stream.end(complete); diff --git a/test/tape/file-sighup-test.js b/test/tape/file-sighup-test.js new file mode 100644 index 0000000..b6e107c --- /dev/null +++ b/test/tape/file-sighup-test.js @@ -0,0 +1,37 @@ +"use strict"; +var test = require('tape') +, sandbox = require('sandboxed-module'); + +test('file appender SIGHUP', function(t) { + var closeCalled = 0 + , openCalled = 0 + , appender = sandbox.require( + '../../lib/appenders/file', + { + 'requires': { + 'streamroller': { + 'RollingFileStream': function() { + this.openTheStream = function() { + openCalled++; + }; + + this.closeTheStream = function(cb) { + closeCalled++; + cb(); + }; + + this.on = function() {}; + } + } + } + } + ).appender('sighup-test-file'); + + process.kill(process.pid, 'SIGHUP'); + t.plan(2); + setTimeout(function() { + t.equal(openCalled, 1, 'open should be called once'); + t.equal(closeCalled, 1, 'close should be called once'); + t.end(); + }, 10); +}); From f4e361120b43b53d4e76cceca874364511399003 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Thu, 3 Nov 2016 08:46:56 +1100 Subject: [PATCH 15/24] fixed broken logstash test --- test/vows/logstashUDP-test.js | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/test/vows/logstashUDP-test.js b/test/vows/logstashUDP-test.js index 8d5cf40..0e2c050 100644 --- a/test/vows/logstashUDP-test.js +++ b/test/vows/logstashUDP-test.js @@ -68,7 +68,8 @@ vows.describe('logstashUDP appender').addBatch({ var fields = { field1: 'value1', field2: 'value2', - level: 'TRACE' + level: 'TRACE', + category: 'myCategory' }; assert.equal(JSON.stringify(json.fields), JSON.stringify(fields)); assert.equal(json.message, 'Log event #1'); @@ -99,7 +100,10 @@ vows.describe('logstashUDP appender').addBatch({ 'it sets some defaults': function (topic) { var json = JSON.parse(topic.results.buffer.toString()); assert.equal(json.type, 'myLogger'); - assert.equal(JSON.stringify(json.fields), JSON.stringify({'level': 'TRACE'})); + assert.equal( + JSON.stringify(json.fields), + JSON.stringify({'level': 'TRACE', 'category': 'myLogger'}) + ); } }, @@ -118,7 +122,12 @@ vows.describe('logstashUDP appender').addBatch({ return setup; },'they should be added to fields structure': function (topic) { var json = JSON.parse(topic.results.buffer.toString()); - var fields = {'extra1': 'value1', 'extra2': 'value2', 'level': 'TRACE'}; + var fields = { + 'extra1': 'value1', + 'extra2': 'value2', + 'level': 'TRACE', + 'category': 'myLogger' + }; assert.equal(JSON.stringify(json.fields), JSON.stringify(fields)); } } From 3a627144fb3d04b4d477b11a74a0e45f2b34f815 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 4 Nov 2016 08:33:40 +1100 Subject: [PATCH 16/24] Shutdown 
now removes the config reload timer (fix for issue #414) --- examples/reload.js | 14 +++++++++++++ lib/log4js.js | 5 +++++ test/tape/reload-shutdown-test.js | 33 +++++++++++++++++++++++++++++++ test/tape/test-config.json | 5 +++++ 4 files changed, 57 insertions(+) create mode 100644 examples/reload.js create mode 100644 test/tape/reload-shutdown-test.js create mode 100644 test/tape/test-config.json diff --git a/examples/reload.js b/examples/reload.js new file mode 100644 index 0000000..a8ede43 --- /dev/null +++ b/examples/reload.js @@ -0,0 +1,14 @@ +"use strict"; +var path = require('path') +, log4js = require('../lib/log4js'); + +log4js.configure( + // config reloading only works with file-based config (obvs) + path.join(__dirname, '../test/tape/test-config.json'), + { reloadSecs: 10 } +); + +log4js.getLogger('testing').info("Just testing"); +log4js.shutdown(function() { + //callback gets you notified when log4js has finished shutting down. +}); diff --git a/lib/log4js.js b/lib/log4js.js index 4db9dec..7f020d5 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -452,6 +452,11 @@ function shutdown(cb) { // not being able to be drained because of run-away log writes. loggerModule.disableAllLogWrites(); + //turn off config reloading + if (configState.timerId) { + clearInterval(configState.timerId); + } + // Call each of the shutdown functions in parallel var completed = 0; var error; diff --git a/test/tape/reload-shutdown-test.js b/test/tape/reload-shutdown-test.js new file mode 100644 index 0000000..7b26fca --- /dev/null +++ b/test/tape/reload-shutdown-test.js @@ -0,0 +1,33 @@ +"use strict"; +var test = require('tape') +, path = require('path') +, sandbox = require('sandboxed-module'); + +test('Reload configuration shutdown hook', function(t) { + var timerId + , log4js = sandbox.require( + '../../lib/log4js', + { + globals: { + clearInterval: function(id) { + timerId = id; + }, + setInterval: function(fn, time) { + return "1234"; + } + } + } + ); + + log4js.configure( + path.join(__dirname, 'test-config.json'), + { reloadSecs: 30 } + ); + + t.plan(1); + log4js.shutdown(function() { + t.equal(timerId, "1234", "Shutdown should clear the reload timer"); + t.end(); + }); + +}); diff --git a/test/tape/test-config.json b/test/tape/test-config.json new file mode 100644 index 0000000..2a69651 --- /dev/null +++ b/test/tape/test-config.json @@ -0,0 +1,5 @@ +{ + "appenders": [ + { "type": "stdout" } + ] +} From c951546f597346643feffdf8c45204c8a0ac9e80 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Fri, 4 Nov 2016 08:39:50 +1100 Subject: [PATCH 17/24] updated with node version support --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 5c865fd..e3d28b8 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,8 @@ The default appender has been changed from `console` to `stdout` - this alleviat I'm also trying to move away from `vows` for the tests, and use `tape` instead. New tests should be added to `test/tape`, not the vows ones. +log4js also no longer supports node versions below 0.12.x. + NOTE: from log4js 0.5 onwards you'll need to explicitly enable replacement of node's console.log functions. Do this either by calling `log4js.replaceConsole()` or configuring with an object or json file like this: ```javascript @@ -95,7 +97,7 @@ You can configure the appenders and log levels manually (as above), or provide a configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. 
The configuration file location may also be specified via the environment variable LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`). -An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`. +An example file can be found in `test/vows/log4js.json`. An example config file with log rolling is in `test/vows/with-log-rolling.json`. You can configure log4js to check for configuration file changes at regular intervals, and if changed, reload. This allows changes to logging levels to occur without restarting the application. To turn it on and specify a period: From eac6295e4c49ea6edd822ebe7aaa2fe34fb6190f Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sun, 6 Nov 2016 15:47:45 +1100 Subject: [PATCH 18/24] fix for #418 - date formats broken in file appenders --- examples/example.js | 4 +--- examples/fromreadme.js | 2 +- examples/log-to-files.js | 36 ++++++++++++++++++++++++++++++++++++ lib/appenders/dateFile.js | 6 +++--- lib/appenders/file.js | 6 +++--- 5 files changed, 44 insertions(+), 10 deletions(-) create mode 100644 examples/log-to-files.js diff --git a/examples/example.js b/examples/example.js index d304cc4..8879a23 100644 --- a/examples/example.js +++ b/examples/example.js @@ -1,3 +1,4 @@ +"use strict"; var log4js = require('../lib/log4js'); //log the cheese logger messages to a file, and the console ones as well. log4js.configure({ @@ -55,6 +56,3 @@ anotherLogger.debug("Just checking"); //will also go to console, since that's configured for all categories var pantsLog = log4js.getLogger('pants'); pantsLog.debug("Something for pants"); - - - diff --git a/examples/fromreadme.js b/examples/fromreadme.js index 71b399a..8d837f4 100644 --- a/examples/fromreadme.js +++ b/examples/fromreadme.js @@ -1,5 +1,5 @@ //remember to change the require to just 'log4js' if you've npm install'ed it -var log4js = require('./lib/log4js'); +var log4js = require('../lib/log4js'); //by default the console appender is loaded //log4js.loadAppender('console'); //you'd only need to add the console appender if you diff --git a/examples/log-to-files.js b/examples/log-to-files.js new file mode 100644 index 0000000..6f140da --- /dev/null +++ b/examples/log-to-files.js @@ -0,0 +1,36 @@ +"use strict"; +var path = require('path') +, log4js = require('../lib/log4js'); + +log4js.configure( + { + appenders: [ + { + type: "file", + filename: "important-things.log", + maxLogSize: 10*1024*1024, // = 10Mb + numBackups: 5, // keep five backup files + compress: true, // compress the backups + encoding: 'utf-8', + mode: parseInt('0640', 8), + flags: 'w+' + }, + { + type: "dateFile", + filename: "more-important-things.log", + pattern: "yyyy-MM-dd-hh", + compress: true + }, + { + type: "stdout" + } + ] + } +); + +var logger = log4js.getLogger('things'); +logger.debug("This little thing went to market"); +logger.info("This little thing stayed at home"); +logger.error("This little thing had roast beef"); +logger.fatal("This little thing had none"); +logger.trace("and this little thing went wee, wee, wee, all the way home."); diff --git a/lib/appenders/dateFile.js b/lib/appenders/dateFile.js index 86635d7..5451d15 100644 --- a/lib/appenders/dateFile.js +++ b/lib/appenders/dateFile.js @@ -21,13 +21,13 @@ process.on('exit', function() { * @layout layout function for log messages - defaults to basicLayout * @timezoneOffset optional timezone offset in minutes - defaults to system local */ -function appender(filename, pattern, alwaysIncludePattern, layout, 
timezoneOffset) { +function appender(filename, pattern, layout, options, timezoneOffset) { layout = layout || layouts.basicLayout; var logFile = new streams.DateRollingFileStream( filename, pattern, - { alwaysIncludePattern: alwaysIncludePattern } + options ); openFiles.push(logFile); @@ -55,8 +55,8 @@ function configure(config, options) { return appender( config.filename, config.pattern, - config.alwaysIncludePattern, layout, + config, config.timezoneOffset ); } diff --git a/lib/appenders/file.js b/lib/appenders/file.js index 2893091..bbb40dd 100644 --- a/lib/appenders/file.js +++ b/lib/appenders/file.js @@ -47,7 +47,7 @@ function fileAppender (file, layout, logSize, numBackups, options, timezoneOffse //there has to be at least one backup if logSize has been specified numBackups = numBackups === 0 ? 1 : numBackups; - debug("Creating file appender (", file, ", ", logSize, ", ", numBackups, ", ", options, ")"); + debug("Creating file appender (", file, ", ", logSize, ", ", numBackups, ", ", options, ", ", timezoneOffset, ")"); var writer = openTheStream(file, logSize, numBackups, options); // push file to the stack of open handlers @@ -88,8 +88,8 @@ function configure(config, options) { layout, config.maxLogSize, config.backups, - config.timezoneOffset, - config + config, + config.timezoneOffset ); } From 3f93872eccbc0a0ff29eaeff0f96d026a2a47d4f Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sun, 6 Nov 2016 15:49:06 +1100 Subject: [PATCH 19/24] fixed lint problem --- lib/appenders/file.js | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/lib/appenders/file.js b/lib/appenders/file.js index bbb40dd..021f261 100644 --- a/lib/appenders/file.js +++ b/lib/appenders/file.js @@ -47,7 +47,13 @@ function fileAppender (file, layout, logSize, numBackups, options, timezoneOffse //there has to be at least one backup if logSize has been specified numBackups = numBackups === 0 ? 1 : numBackups; - debug("Creating file appender (", file, ", ", logSize, ", ", numBackups, ", ", options, ", ", timezoneOffset, ")"); + debug("Creating file appender (", + file, ", ", + logSize, ", ", + numBackups, ", ", + options, ", ", + timezoneOffset, ")" + ); var writer = openTheStream(file, logSize, numBackups, options); // push file to the stack of open handlers From e4f196a6823308168c917567b5e456a67c1ea699 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Sun, 6 Nov 2016 15:49:31 +1100 Subject: [PATCH 20/24] 1.0.1 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2d5d968..a113f81 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "log4js", - "version": "1.0.0", + "version": "1.0.1", "description": "Port of Log4js to work with node.", "keywords": [ "logging", From 169627a4647209343b4bfc124e0c8e8ee28f12a9 Mon Sep 17 00:00:00 2001 From: sparklton Date: Sun, 6 Nov 2016 08:45:19 +0200 Subject: [PATCH 21/24] added description for using dependancies --- lib/appenders/logFacesAppender.js | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/lib/appenders/logFacesAppender.js b/lib/appenders/logFacesAppender.js index 7f2c6dc..efe6858 100644 --- a/lib/appenders/logFacesAppender.js +++ b/lib/appenders/logFacesAppender.js @@ -1,3 +1,15 @@ +/** +* logFaces appender sends JSON formatted log events to logFaces receivers. +* There are two types of receivers supported - raw UDP sockets (for server side apps), +* and HTTP (for client side apps). 
Depending on the usage, this appender +* requires either of the two: +* +* For UDP require 'dgram', see 'https://nodejs.org/api/dgram.html' +* For HTTP require 'axios', see 'https://www.npmjs.com/package/axios' +* +* Make sure your project have relevant dependancy installed before using this appender. +*/ + "use strict"; var util = require('util'); var context = {}; @@ -41,11 +53,9 @@ function servlet(config){ } /** -* logFaces appender sends JSON formatted log events to logFaces receivers. -* There are two types of receivers targetted - raw UDP sockets and HTTP. * For UDP (node.js) use the following configuration params: * { -* "type": "logFacesAppender", +* "type": "logFacesAppender", // must be present for instantiation * "application": "LFS-TEST", // name of the application (domain) * "remoteHost": "127.0.0.1", // logFaces server address (hostname) * "port": 55201 // UDP receiver listening port @@ -53,9 +63,9 @@ function servlet(config){ * * For HTTP (browsers or node.js) use the following configuration params: * { -* "type": "logFacesAppender", -* "application": "LFS-TEST", // name of the application (domain) -* "url": "http://lfs-server/..", // logFaces receiver binding name +* "type": "logFacesAppender", // must be present for instantiation +* "application": "LFS-TEST", // name of the application (domain) +* "url": "http://lfs-server/logs", // logFaces receiver servlet URL * } */ function logFacesAppender(config) { From 7368cd33dcdee6cfc6d1c071cbdf301c90185357 Mon Sep 17 00:00:00 2001 From: Gareth Jones Date: Wed, 9 Nov 2016 07:50:56 +1100 Subject: [PATCH 22/24] fix for #419 - calling replace console outside of configure breaks things --- lib/log4js.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/log4js.js b/lib/log4js.js index 7f020d5..653d4bc 100644 --- a/lib/log4js.js +++ b/lib/log4js.js @@ -302,13 +302,12 @@ function loadConfigurationFile(filename) { function configureOnceOff(config, options) { if (config) { try { + restoreConsole(); configureLevels(config.levels); configureAppenders(config.appenders, options); if (config.replaceConsole) { replaceConsole(); - } else { - restoreConsole(); } } catch (e) { throw new Error( From 9eb0f9dbd06e6178356fbb291a9827d318be4f23 Mon Sep 17 00:00:00 2001 From: Chi Thu Le Date: Thu, 10 Nov 2016 10:33:45 +0100 Subject: [PATCH 23/24] fix for #315 - Include milliseconds in format ISO8601_WITH_TZ_OFFSET --- lib/date_format.js | 2 +- test/vows/date_format-test.js | 4 ++-- test/vows/layouts-test.js | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/date_format.js b/lib/date_format.js index b9e0131..109f931 100644 --- a/lib/date_format.js +++ b/lib/date_format.js @@ -1,6 +1,6 @@ "use strict"; exports.ISO8601_FORMAT = "yyyy-MM-dd hh:mm:ss.SSS"; -exports.ISO8601_WITH_TZ_OFFSET_FORMAT = "yyyy-MM-ddThh:mm:ssO"; +exports.ISO8601_WITH_TZ_OFFSET_FORMAT = "yyyy-MM-ddThh:mm:ss.SSSO"; exports.DATETIME_FORMAT = "dd MM yyyy hh:mm:ss.SSS"; exports.ABSOLUTETIME_FORMAT = "hh:mm:ss.SSS"; diff --git a/test/vows/date_format-test.js b/test/vows/date_format-test.js index 02a545a..a6fe52e 100644 --- a/test/vows/date_format-test.js +++ b/test/vows/date_format-test.js @@ -28,14 +28,14 @@ vows.describe('date_format').addBatch({ date.getTimezoneOffset = function() { return -660; }; assert.equal( dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date), - "2010-01-11T14:31:30+1100" + "2010-01-11T14:31:30.005+1100" ); date = createFixedDate(); date.setMinutes(date.getMinutes() - date.getTimezoneOffset() + 120); 
date.getTimezoneOffset = function() { return 120; }; assert.equal( dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date), - "2010-01-11T14:31:30-0200" + "2010-01-11T14:31:30.005-0200" ); }, diff --git a/test/vows/layouts-test.js b/test/vows/layouts-test.js index 7a7a606..84971a5 100644 --- a/test/vows/layouts-test.js +++ b/test/vows/layouts-test.js @@ -252,7 +252,7 @@ vows.describe('log4js layouts').addBatch({ test(args, '%d', '2010-12-05 14:18:30.045'); }, '%d should allow for format specification': function(args) { - test(args, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30-0000'); + test(args, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30.045-0000'); test(args, '%d{ISO8601}', '2010-12-05 14:18:30.045'); test(args, '%d{ABSOLUTE}', '14:18:30.045'); test(args, '%d{DATE}', '05 12 2010 14:18:30.045'); From 0af3f1607f8baf2ae8c99772db37a7b80399a1c0 Mon Sep 17 00:00:00 2001 From: Nathan Woltman Date: Thu, 10 Nov 2016 11:32:09 -0500 Subject: [PATCH 24/24] Improve core logging performance Allow V8 to optimize functions by not leaking the arguments object. This results in a performance improvement of about 20%. --- lib/layouts.js | 11 +++++++++-- lib/logger.js | 5 ++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/lib/layouts.js b/lib/layouts.js index 75b6e1e..e44eef6 100644 --- a/lib/layouts.js +++ b/lib/layouts.js @@ -33,7 +33,7 @@ function wrapErrorsWithInspect(items) { if (semver.satisfies(process.version, '>=6')) { return util.format(item); } else { - return util.format(item) + '\n' + item.stack; + return util.format(item) + '\n' + item.stack; } } }; } else { @@ -43,7 +43,14 @@ function wrapErrorsWithInspect(items) { } function formatLogData(logData) { - var data = Array.isArray(logData) ? logData : Array.prototype.slice.call(arguments); + var data = logData; + if (!Array.isArray(data)) { + var numArgs = arguments.length; + data = new Array(numArgs); + for (var i = 0; i < numArgs; i++) { + data[i] = arguments[i]; + } + } return util.format.apply(util, wrapErrorsWithInspect(data)); } diff --git a/lib/logger.js b/lib/logger.js index 75ac7ae..c596ba0 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -82,7 +82,10 @@ function addLevelMethods(level) { Logger.prototype[levelMethod] = function () { if (logWritesEnabled && this.isLevelEnabled(level)) { var numArgs = arguments.length; - var args = Array.prototype.slice.call(arguments); + var args = new Array(numArgs); + for (var i = 0; i < numArgs; i++) { + args[i] = arguments[i]; + } this._log(level, args); } };
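The logger.js and layouts.js changes above both replace `Array.prototype.slice.call(arguments)` with a manual index-by-index copy. A standalone sketch of the pattern, with illustrative function names that are not part of the patch:

```javascript
// Slicing or passing `arguments` to another function can prevent V8 from
// optimizing the caller on the node versions targeted here; copying it
// element by element avoids leaking the arguments object.

// before: leaks the arguments object
function joinArgsSlow() {
  var args = Array.prototype.slice.call(arguments);
  return args.join(' ');
}

// after: manual copy, same behaviour, optimizable
function joinArgsFast() {
  var numArgs = arguments.length;
  var args = new Array(numArgs);
  for (var i = 0; i < numArgs; i++) {
    args[i] = arguments[i];
  }
  return args.join(' ');
}

console.log(joinArgsSlow('a', 'b')); // 'a b'
console.log(joinArgsFast('a', 'b')); // 'a b'
```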