Mirror of https://github.com/log4js-node/log4js-node.git (synced 2025-12-08 19:26:01 +00:00)

Commit 97fc892c00 (parent d6642e01cf): fixing some jshint errors
@@ -1,15 +1,15 @@
{
{
"node": true,
"laxcomma": true,
"indent": 2,
"globalstrict": true,
"maxparams": 5,
"maxparams": 6,
"maxdepth": 3,
"maxstatements": 20,
"maxcomplexity": 5,
"maxcomplexity": 10,
"maxlen": 100,
"globals": {
"describe": true,
"it": true
}
}
}

@@ -19,11 +19,11 @@ process.on('exit', function() {
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
* @param file file log messages will be written to
* @param layout a function that takes a logevent and returns a string
* @param layout a function that takes a logevent and returns a string
* (defaults to basicLayout).
* @param logSize - the maximum size (in bytes) for a log file,
* @param logSize - the maximum size (in bytes) for a log file,
* if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize
* @param numBackups - the number of log files to keep after logSize
* has been reached (default 5)
* @param compress - flag that controls log file compression
* @param timezoneOffset - optional timezone offset in minutes (default system local)
@@ -47,9 +47,9 @@ function fileAppender (file, layout, logSize, numBackups, compress, timezoneOffs
);
} else {
stream = fs.createWriteStream(
file,
{ encoding: "utf8",
mode: parseInt('0644', 8),
file,
{ encoding: "utf8",
mode: parseInt('0644', 8),
flags: 'a' }
);
}
@@ -60,7 +60,7 @@ function fileAppender (file, layout, logSize, numBackups, compress, timezoneOffs
}

var logFile = openTheStream(file, logSize, numBackups);

// push file to the stack of open handlers
openFiles.push(logFile);

@@ -80,7 +80,14 @@ function configure(config, options) {
config.filename = path.join(options.cwd, config.filename);
}

return fileAppender(config.filename, layout, config.maxLogSize, config.backups, config.compress, config.timezoneOffset);
return fileAppender(
config.filename,
layout,
config.maxLogSize,
config.backups,
config.compress,
config.timezoneOffset
);
}

function shutdown(cb) {

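For context on the configure() change above, a minimal usage sketch of the file appender. The field names come from configure() in this hunk and from the test configuration later in the commit (type, filename, maxLogSize, backups, compress, category); the file name and sizes are illustrative, not part of the commit.

    var log4js = require('log4js');

    log4js.configure({
      appenders: [{
        type: 'file',
        filename: 'logs/app.log',        // becomes config.filename in configure()
        maxLogSize: 10 * 1024 * 1024,    // logSize in bytes; omit to disable rolling
        backups: 5,                      // numBackups kept once logSize is reached
        compress: true,                  // gzip rolled-over files
        category: 'app'
      }]
    });

    var logger = log4js.getLogger('app');
    logger.info('written via the file appender');
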
@@ -16,15 +16,15 @@ function RollingFileSync (filename, size, backups, options) {
throw new Error("You must specify a filename and file size");
}
}

throwErrorIfArgumentsAreNotValid();

this.filename = filename;
this.size = size;
this.backups = backups || 1;
this.options = options || { encoding: 'utf8', mode: parseInt('0644', 8), flags: 'a' };
this.currentSize = 0;

function currentFileSize(file) {
var fileSize = 0;
try {
@@ -47,15 +47,15 @@ RollingFileSync.prototype.shouldRoll = function() {
RollingFileSync.prototype.roll = function(filename) {
var that = this,
nameMatcher = new RegExp('^' + path.basename(filename));

function justTheseFiles (item) {
return nameMatcher.test(item);
}

function index(filename_) {
return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
}

function byIndex(a, b) {
if (index(a) > index(b)) {
return 1;
@@ -77,7 +77,7 @@ RollingFileSync.prototype.roll = function(filename) {
} catch(e) {
//ignore err: if we could not delete, it's most likely that it doesn't exist
}

debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
fs.renameSync(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1));
}
@@ -86,7 +86,7 @@ RollingFileSync.prototype.roll = function(filename) {
function renameTheFiles() {
//roll the backups (rename file.n to file.n+1, where n <= numBackups)
debug("Renaming the old files");

var files = fs.readdirSync(path.dirname(filename));
files.filter(justTheseFiles).sort(byIndex).reverse().forEach(increaseFileIndex);
}
@@ -97,8 +97,8 @@ RollingFileSync.prototype.roll = function(filename) {

RollingFileSync.prototype.write = function(chunk, encoding) {
var that = this;

function writeTheChunk() {
debug("writing the chunk to the file");
that.currentSize += chunk.length;
@@ -106,13 +106,13 @@ RollingFileSync.prototype.write = function(chunk, encoding) {
}

debug("in write");

if (this.shouldRoll()) {
this.currentSize = 0;
this.roll(this.filename);
}

writeTheChunk();
};

@@ -121,11 +121,11 @@ RollingFileSync.prototype.write = function(chunk, encoding) {
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
* @param file file log messages will be written to
* @param layout a function that takes a logevent and returns a string
* @param layout a function that takes a logevent and returns a string
* (defaults to basicLayout).
* @param logSize - the maximum size (in bytes) for a log file,
* @param logSize - the maximum size (in bytes) for a log file,
* if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize
* @param numBackups - the number of log files to keep after logSize
* has been reached (default 5)
* @param timezoneOffset - optional timezone offset in minutes
* (default system local)
@@ -141,7 +141,7 @@ function fileAppender (file, layout, logSize, numBackups, timezoneOffset) {

function openTheStream(file, fileSize, numFiles) {
var stream;

if (fileSize) {
stream = new RollingFileSync(
file,
@@ -153,7 +153,7 @@ function fileAppender (file, layout, logSize, numBackups, timezoneOffset) {
// create file if it doesn't exist
if (!fs.existsSync(f))
fs.appendFileSync(f, '');

return {
write: function(data) {
fs.appendFileSync(f, data);
@@ -166,7 +166,7 @@ function fileAppender (file, layout, logSize, numBackups, timezoneOffset) {
}

var logFile = openTheStream(file, logSize, numBackups);

return function(loggingEvent) {
logFile.write(layout(loggingEvent, timezoneOffset) + eol);
};
@@ -182,7 +182,13 @@ function configure(config, options) {
config.filename = path.join(options.cwd, config.filename);
}

return fileAppender(config.filename, layout, config.maxLogSize, config.backups, config.timezoneOffset);
return fileAppender(
config.filename,
layout,
config.maxLogSize,
config.backups,
config.timezoneOffset
);
}

exports.appender = fileAppender;

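A quick illustration of the backup-naming scheme that index() and byIndex() above implement: rolled files are named <base>.1, <base>.2, ..., and on each roll every existing file is renamed from index n to n+1, highest index first, so nothing is overwritten. The helper below is a hypothetical standalone rewrite of that logic, not code from the commit.

    var path = require('path');

    // "app.log" -> 0, "app.log.2" -> 2 (mirrors index() in the hunk above)
    function backupIndex(base, candidate) {
      return parseInt(candidate.substring((path.basename(base) + '.').length), 10) || 0;
    }

    var base = 'app.log';
    var files = ['app.log', 'app.log.2', 'app.log.1'];

    files
      .sort(function(a, b) { return backupIndex(base, a) - backupIndex(base, b); })
      .reverse()
      .forEach(function(f) {
        console.log('rename', f, '->', base + '.' + (backupIndex(base, f) + 1));
      });
    // rename app.log.2 -> app.log.3
    // rename app.log.1 -> app.log.2
    // rename app.log -> app.log.1
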
@@ -46,7 +46,7 @@ function gelfAppender (layout, host, port, hostname, facility) {
facility = config.facility;
customFields = config.customFields;
}

host = host || 'localhost';
port = port || 12201;
hostname = hostname || require('os').hostname();
@@ -55,18 +55,18 @@ function gelfAppender (layout, host, port, hostname, facility) {
var defaultCustomFields = customFields || {};

if(facility) {
defaultCustomFields['_facility'] = facility;
defaultCustomFields._facility = facility;
}

client = dgram.createSocket("udp4");

process.on('exit', function() {
if (client) client.close();
});

/**
* Add custom fields (start with underscore )
* - if the first object passed to the logger contains 'GELF' field,
* Add custom fields (start with underscore )
* - if the first object passed to the logger contains 'GELF' field,
* copy the underscore fields to the message
* @param loggingEvent
* @param msg
@@ -76,7 +76,7 @@ function gelfAppender (layout, host, port, hostname, facility) {
/* append defaultCustomFields firsts */
Object.keys(defaultCustomFields).forEach(function(key) {
// skip _id field for graylog2, skip keys not starts with UNDERSCORE
if (key.match(/^_/) && key !== "_id") {
if (key.match(/^_/) && key !== "_id") {
msg[key] = defaultCustomFields[key];
}
});
@@ -85,33 +85,33 @@ function gelfAppender (layout, host, port, hostname, facility) {
var data = loggingEvent.data;
if (!Array.isArray(data) || data.length === 0) return;
var firstData = data[0];

if (!firstData.GELF) return; // identify with GELF field defined
// Remove the GELF key, some gelf supported logging systems drop the message with it
delete firstData.GELF;
Object.keys(firstData).forEach(function(key) {
// skip _id field for graylog2, skip keys not starts with UNDERSCORE
if (key.match(/^_/) || key !== "_id") {
if (key.match(/^_/) || key !== "_id") {
msg[key] = firstData[key];
}
});

/* the custom field object should be removed, so it will not be looged by the later appenders */
loggingEvent.data.shift();
loggingEvent.data.shift();
}

function preparePacket(loggingEvent) {
var msg = {};
addCustomFields(loggingEvent, msg);
msg.short_message = layout(loggingEvent);

msg.version="1.1";
msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // log should use millisecond
msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // log should use millisecond
msg.host = hostname;
msg.level = levelMapping[loggingEvent.level || levels.DEBUG];
return msg;
}

function sendPacket(packet) {
try {
client.send(packet, 0, packet.length, port, host);

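The comments above describe the GELF appender's custom-field convention: if the first argument passed to the logger is an object with a GELF property, its underscore-prefixed keys are copied onto the outgoing message and the marker object is dropped from the event data. A hedged usage sketch, assuming a configuration of type 'gelf' with the default host and port noted in the hunk; the field names and facility value are made up for illustration.

    var log4js = require('log4js');

    log4js.configure({
      appenders: [{ type: 'gelf', host: 'localhost', port: 12201, facility: 'myapp' }]
    });

    var logger = log4js.getLogger();

    // _userId and _requestId end up as custom fields on the GELF packet;
    // the remaining arguments become short_message via the layout.
    logger.info({ GELF: true, _userId: 42, _requestId: 'abc-123' }, 'user logged in');
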
@ -6,7 +6,7 @@ var layouts = require('../layouts')
|
||||
|
||||
|
||||
function isAnyObject(value) {
|
||||
return value != null && (typeof value === 'object' || typeof value === 'function');
|
||||
return value !== null && (typeof value === 'object' || typeof value === 'function');
|
||||
}
|
||||
|
||||
function numKeys(o) {
|
||||
@ -50,7 +50,7 @@ function processTags(msgListArgs) {
|
||||
* {
|
||||
* token: 'your-really-long-input-token',
|
||||
* subdomain: 'your-subdomain',
|
||||
* tags: ['loggly-tag1', 'loggly-tag2', .., 'loggly-tagn']
|
||||
* tags: ['loggly-tag1', 'loggly-tag2', .., 'loggly-tagn']
|
||||
* }
|
||||
* @param layout a function that takes a logevent and returns a string (defaults to objectLayout).
|
||||
*/
|
||||
@ -74,7 +74,7 @@ function logglyAppender(config, layout) {
|
||||
category: loggingEvent.categoryName,
|
||||
hostname: os.hostname().toString(),
|
||||
}, additionalTags);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config) {
|
||||
|
||||
@ -16,11 +16,11 @@ function mailgunAppender(_config, _layout) {
|
||||
to: _config.to,
|
||||
subject: _config.subject,
|
||||
text: layout(loggingEvent, config.timezoneOffset)
|
||||
}
|
||||
};
|
||||
|
||||
mailgun.messages().send(data, function (error, body) {
|
||||
if (error != null) console.error("log4js.mailgunAppender - Error happened", error);
|
||||
})
|
||||
if (error !== null) console.error("log4js.mailgunAppender - Error happened", error);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
@ -41,4 +41,3 @@ function configure(_config) {
|
||||
|
||||
exports.appender = mailgunAppender;
|
||||
exports.configure = configure;
|
||||
|
||||
|
||||
@ -9,7 +9,7 @@ var log4js = require('../log4js')
|
||||
* set up that appender).
|
||||
*/
|
||||
function logServer(config) {
|
||||
|
||||
|
||||
/**
|
||||
* Takes a utf-8 string, returns an object with
|
||||
* the correct log properties.
|
||||
@ -32,21 +32,21 @@ function logServer(config) {
|
||||
|
||||
loggingEvent.remoteAddress = clientSocket.remoteAddress;
|
||||
loggingEvent.remotePort = clientSocket.remotePort;
|
||||
|
||||
|
||||
return loggingEvent;
|
||||
}
|
||||
|
||||
|
||||
var actualAppender = config.actualAppender,
|
||||
server = net.createServer(function serverCreated(clientSocket) {
|
||||
clientSocket.setEncoding('utf8');
|
||||
var logMessage = '';
|
||||
|
||||
|
||||
function logTheMessage(msg) {
|
||||
if (logMessage.length > 0) {
|
||||
actualAppender(deserializeLoggingEvent(clientSocket, msg));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function chunkReceived(chunk) {
|
||||
var event;
|
||||
logMessage += chunk || '';
|
||||
@ -58,13 +58,13 @@ function logServer(config) {
|
||||
chunkReceived();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
clientSocket.on('data', chunkReceived);
|
||||
clientSocket.on('end', chunkReceived);
|
||||
});
|
||||
|
||||
|
||||
server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost');
|
||||
|
||||
|
||||
return actualAppender;
|
||||
}
|
||||
|
||||
@ -72,9 +72,9 @@ function workerAppender(config) {
|
||||
var canWrite = false,
|
||||
buffer = [],
|
||||
socket;
|
||||
|
||||
|
||||
createSocket();
|
||||
|
||||
|
||||
function createSocket() {
|
||||
socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
|
||||
socket.on('connect', function() {
|
||||
@ -85,24 +85,25 @@ function workerAppender(config) {
|
||||
//don't bother listening for 'error', 'close' gets called after that anyway
|
||||
socket.on('close', createSocket);
|
||||
}
|
||||
|
||||
|
||||
function emptyBuffer() {
|
||||
var evt;
|
||||
while ((evt = buffer.shift())) {
|
||||
write(evt);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function write(loggingEvent) {
|
||||
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
|
||||
// The following allows us to serialize errors correctly.
|
||||
if (loggingEvent && loggingEvent.stack && JSON.stringify(loggingEvent) === '{}') { // Validate that we really are in this case
|
||||
// Validate that we really are in this case
|
||||
if (loggingEvent && loggingEvent.stack && JSON.stringify(loggingEvent) === '{}') {
|
||||
loggingEvent = {stack : loggingEvent.stack};
|
||||
}
|
||||
socket.write(JSON.stringify(loggingEvent), 'utf8');
|
||||
socket.write(END_MSG, 'utf8');
|
||||
}
|
||||
|
||||
|
||||
return function log(loggingEvent) {
|
||||
if (canWrite) {
|
||||
write(loggingEvent);
|
||||
|
||||
@@ -1,6 +1,6 @@
"use strict";

var layouts = require('../layouts')
var layouts = require('../layouts');

function stderrAppender(layout, timezoneOffset) {
layout = layout || layouts.colouredLayout;

@ -105,11 +105,13 @@ function getLogger(logger4js, options) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds custom {token, replacement} objects to defaults, overwriting the defaults if any tokens clash
|
||||
* Adds custom {token, replacement} objects to defaults,
|
||||
* overwriting the defaults if any tokens clash
|
||||
*
|
||||
* @param {IncomingMessage} req
|
||||
* @param {ServerResponse} res
|
||||
* @param {Array} custom_tokens [{ token: string-or-regexp, replacement: string-or-replace-function }]
|
||||
* @param {Array} custom_tokens
|
||||
* [{ token: string-or-regexp, replacement: string-or-replace-function }]
|
||||
* @return {Array}
|
||||
*/
|
||||
function assemble_tokens(req, res, custom_tokens) {
|
||||
@ -133,13 +135,36 @@ function assemble_tokens(req, res, custom_tokens) {
|
||||
default_tokens.push({ token: ':status', replacement: res.__statusCode || res.statusCode });
|
||||
default_tokens.push({ token: ':response-time', replacement: res.responseTime });
|
||||
default_tokens.push({ token: ':date', replacement: new Date().toUTCString() });
|
||||
default_tokens.push({ token: ':referrer', replacement: req.headers.referer || req.headers.referrer || '' });
|
||||
default_tokens.push({ token: ':http-version', replacement: req.httpVersionMajor + '.' + req.httpVersionMinor });
|
||||
default_tokens.push({ token: ':remote-addr', replacement: req.headers['x-forwarded-for'] || req.ip || req._remoteAddress ||
|
||||
(req.socket && (req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress))) });
|
||||
default_tokens.push({
|
||||
token: ':referrer',
|
||||
replacement: req.headers.referer || req.headers.referrer || ''
|
||||
});
|
||||
default_tokens.push({
|
||||
token: ':http-version',
|
||||
replacement: req.httpVersionMajor + '.' + req.httpVersionMinor
|
||||
});
|
||||
default_tokens.push({
|
||||
token: ':remote-addr',
|
||||
replacement:
|
||||
req.headers['x-forwarded-for'] ||
|
||||
req.ip ||
|
||||
req._remoteAddress ||
|
||||
(req.socket &&
|
||||
(req.socket.remoteAddress ||
|
||||
(req.socket.socket && req.socket.socket.remoteAddress)
|
||||
)
|
||||
)
|
||||
}
|
||||
);
|
||||
default_tokens.push({ token: ':user-agent', replacement: req.headers['user-agent'] });
|
||||
default_tokens.push({ token: ':content-length', replacement: (res._headers && res._headers['content-length']) ||
|
||||
(res.__headers && res.__headers['Content-Length']) || '-' });
|
||||
default_tokens.push({
|
||||
token: ':content-length',
|
||||
replacement:
|
||||
(res._headers && res._headers['content-length']) ||
|
||||
(res.__headers && res.__headers['Content-Length']) ||
|
||||
'-'
|
||||
}
|
||||
);
|
||||
default_tokens.push({ token: /:req\[([^\]]+)\]/g, replacement: function(_, field) {
|
||||
return req.headers[field.toLowerCase()];
|
||||
} });
|
||||
@ -150,7 +175,7 @@ function assemble_tokens(req, res, custom_tokens) {
|
||||
} });
|
||||
|
||||
return array_unique_tokens(custom_tokens.concat(default_tokens));
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Return formatted log line.
|
||||
|
||||
@ -5,23 +5,23 @@ var dateFormat = require('./date_format')
|
||||
, util = require('util')
|
||||
, replacementRegExp = /%[sdj]/g
|
||||
, layoutMakers = {
|
||||
"messagePassThrough": function() { return messagePassThroughLayout; },
|
||||
"basic": function() { return basicLayout; },
|
||||
"colored": function() { return colouredLayout; },
|
||||
"coloured": function() { return colouredLayout; },
|
||||
"messagePassThrough": function() { return messagePassThroughLayout; },
|
||||
"basic": function() { return basicLayout; },
|
||||
"colored": function() { return colouredLayout; },
|
||||
"coloured": function() { return colouredLayout; },
|
||||
"pattern": function (config) {
|
||||
return patternLayout(config && config.pattern, config && config.tokens);
|
||||
},
|
||||
"dummy": function() { return dummyLayout; }
|
||||
}
|
||||
, colours = {
|
||||
ALL: "grey",
|
||||
TRACE: "blue",
|
||||
DEBUG: "cyan",
|
||||
INFO: "green",
|
||||
WARN: "yellow",
|
||||
ERROR: "red",
|
||||
FATAL: "magenta",
|
||||
ALL: "grey",
|
||||
TRACE: "blue",
|
||||
DEBUG: "cyan",
|
||||
INFO: "green",
|
||||
WARN: "yellow",
|
||||
ERROR: "red",
|
||||
FATAL: "magenta",
|
||||
OFF: "grey"
|
||||
};
|
||||
|
||||
@ -95,7 +95,11 @@ function timestampLevelAndCategory(loggingEvent, colour, timezoneOffest) {
|
||||
* @author Stephan Strittmatter
|
||||
*/
|
||||
function basicLayout (loggingEvent, timezoneOffset) {
|
||||
return timestampLevelAndCategory(loggingEvent, undefined, timezoneOffset) + formatLogData(loggingEvent.data);
|
||||
return timestampLevelAndCategory(
|
||||
loggingEvent,
|
||||
undefined,
|
||||
timezoneOffset
|
||||
) + formatLogData(loggingEvent.data);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -135,7 +139,7 @@ function dummyLayout(loggingEvent) {
|
||||
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
|
||||
* You can use %[ and %] to define a colored block.
|
||||
*
|
||||
* Tokens are specified as simple key:value objects.
|
||||
* Tokens are specified as simple key:value objects.
|
||||
* The key represents the token name whereas the value can be a string or function
|
||||
* which is called to extract the value to put in the log message. If token is not
|
||||
* found, it doesn't replace the field.
|
||||
@ -153,7 +157,7 @@ function dummyLayout(loggingEvent) {
|
||||
function patternLayout (pattern, tokens, timezoneOffset) {
|
||||
var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
|
||||
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdhmnprzxy%])(\{([^\}]+)\})?|([^%]+)/;
|
||||
|
||||
|
||||
pattern = pattern || TTCC_CONVERSION_PATTERN;
|
||||
|
||||
function categoryName(loggingEvent, specifier) {
|
||||
@ -176,7 +180,7 @@ function patternLayout (pattern, tokens, timezoneOffset) {
|
||||
if (format == "ISO8601") {
|
||||
format = dateFormat.ISO8601_FORMAT;
|
||||
} else if (format == "ISO8601_WITH_TZ_OFFSET") {
|
||||
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
|
||||
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
|
||||
} else if (format == "ABSOLUTE") {
|
||||
format = dateFormat.ABSOLUTETIME_FORMAT;
|
||||
} else if (format == "DATE") {
|
||||
@ -186,7 +190,7 @@ function patternLayout (pattern, tokens, timezoneOffset) {
|
||||
// Format the date
|
||||
return dateFormat.asString(format, loggingEvent.startTime, timezoneOffset);
|
||||
}
|
||||
|
||||
|
||||
function hostname() {
|
||||
return os.hostname().toString();
|
||||
}
|
||||
@ -194,7 +198,7 @@ function patternLayout (pattern, tokens, timezoneOffset) {
|
||||
function formatMessage(loggingEvent) {
|
||||
return formatLogData(loggingEvent.data);
|
||||
}
|
||||
|
||||
|
||||
function endOfLine() {
|
||||
return eol;
|
||||
}
|
||||
@ -226,7 +230,7 @@ function patternLayout (pattern, tokens, timezoneOffset) {
|
||||
return process.pid;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function clusterInfo(loggingEvent, specifier) {
|
||||
if (loggingEvent.cluster && specifier) {
|
||||
return specifier
|
||||
@ -300,12 +304,19 @@ function patternLayout (pattern, tokens, timezoneOffset) {
|
||||
}
|
||||
return toPad;
|
||||
}
|
||||
|
||||
|
||||
function truncateAndPad(toTruncAndPad, truncation, padding) {
|
||||
var replacement = toTruncAndPad;
|
||||
replacement = truncate(truncation, replacement);
|
||||
replacement = pad(padding, replacement);
|
||||
return replacement;
|
||||
}
|
||||
|
||||
return function(loggingEvent) {
|
||||
var formattedString = "";
|
||||
var result;
|
||||
var searchString = pattern;
|
||||
|
||||
|
||||
while ((result = regex.exec(searchString))) {
|
||||
var matchedString = result[0];
|
||||
var padding = result[1];
|
||||
@ -313,7 +324,7 @@ function patternLayout (pattern, tokens, timezoneOffset) {
|
||||
var conversionCharacter = result[3];
|
||||
var specifier = result[5];
|
||||
var text = result[6];
|
||||
|
||||
|
||||
// Check if the pattern matched was just normal text
|
||||
if (text) {
|
||||
formattedString += "" + text;
|
||||
@ -321,12 +332,7 @@ function patternLayout (pattern, tokens, timezoneOffset) {
|
||||
// Create a raw replacement string based on the conversion
|
||||
// character and specifier
|
||||
var replacement = replaceToken(conversionCharacter, loggingEvent, specifier);
|
||||
|
||||
// Format the replacement according to any padding or
|
||||
// truncation specified
|
||||
replacement = truncate(truncation, replacement);
|
||||
replacement = pad(padding, replacement);
|
||||
formattedString += replacement;
|
||||
formattedString += truncateAndPad(replacement, truncation, padding);
|
||||
}
|
||||
searchString = searchString.substr(result.index + result[0].length);
|
||||
}
|
||||
@ -336,11 +342,11 @@ function patternLayout (pattern, tokens, timezoneOffset) {
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
basicLayout: basicLayout,
|
||||
messagePassThroughLayout: messagePassThroughLayout,
|
||||
patternLayout: patternLayout,
|
||||
colouredLayout: colouredLayout,
|
||||
coloredLayout: colouredLayout,
|
||||
basicLayout: basicLayout,
|
||||
messagePassThroughLayout: messagePassThroughLayout,
|
||||
patternLayout: patternLayout,
|
||||
colouredLayout: colouredLayout,
|
||||
coloredLayout: colouredLayout,
|
||||
dummyLayout: dummyLayout,
|
||||
addLayout: function(name, serializerGenerator) {
|
||||
layoutMakers[name] = serializerGenerator;
|
||||
|
||||
@ -97,7 +97,7 @@ function normalizeCategory (category) {
|
||||
return category + '.';
|
||||
}
|
||||
|
||||
function doesLevelEntryContainsLogger (levelCategory, loggerCategory) {
|
||||
function doesLevelEntryContainsLogger (levelCategory, loggerCategory) {
|
||||
var normalizedLevelCategory = normalizeCategory(levelCategory);
|
||||
var normalizedLoggerCategory = normalizeCategory(loggerCategory);
|
||||
return normalizedLoggerCategory.substring(0, normalizedLevelCategory.length) == normalizedLevelCategory;
|
||||
@ -125,11 +125,12 @@ function getLogger (loggerCategoryName) {
|
||||
|
||||
if (!hasLogger(loggerCategoryName)) {
|
||||
|
||||
var level = undefined;
|
||||
var level;
|
||||
|
||||
// If there's a "levels" entry in the configuration
|
||||
if (levels.config) {
|
||||
// Goes through the categories in the levels configuration entry, starting by the "higher" ones.
|
||||
// Goes through the categories in the levels configuration entry,
|
||||
// starting with the "higher" ones.
|
||||
var keys = Object.keys(levels.config).sort();
|
||||
for (var idx = 0; idx < keys.length; idx++) {
|
||||
var levelCategory = keys[idx];
|
||||
@ -139,7 +140,7 @@ function getLogger (loggerCategoryName) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Create the logger for this name if it doesn't already exist
|
||||
loggers[loggerCategoryName] = new Logger(loggerCategoryName, level);
|
||||
|
||||
@ -159,7 +160,7 @@ function getLogger (loggerCategoryName) {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return loggers[loggerCategoryName];
|
||||
}
|
||||
|
||||
@ -176,10 +177,10 @@ function addAppender () {
|
||||
if (Array.isArray(args[0])) {
|
||||
args = args[0];
|
||||
}
|
||||
|
||||
|
||||
args.forEach(function(appenderCategory) {
|
||||
addAppenderToCategory(appender, appenderCategory);
|
||||
|
||||
|
||||
if (appenderCategory === ALL_CATEGORIES) {
|
||||
addAppenderToAllLoggers(appender);
|
||||
} else {
|
||||
@ -189,7 +190,7 @@ function addAppender () {
|
||||
loggers[loggerCategory].addListener("log", appender);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
});
|
||||
}
|
||||
@ -243,7 +244,7 @@ function configureLevels(_levels) {
|
||||
var category = keys[idx];
|
||||
if(category === ALL_CATEGORIES) {
|
||||
setGlobalLogLevel(_levels[category]);
|
||||
}
|
||||
}
|
||||
for(var loggerCategory in loggers) {
|
||||
if (doesLevelEntryContainsLogger(category, loggerCategory)) {
|
||||
loggers[loggerCategory].setLevel(_levels[category]);
|
||||
@ -280,7 +281,7 @@ function configureOnceOff(config, options) {
|
||||
try {
|
||||
configureLevels(config.levels);
|
||||
configureAppenders(config.appenders, options);
|
||||
|
||||
|
||||
if (config.replaceConsole) {
|
||||
replaceConsole();
|
||||
} else {
|
||||
@ -288,7 +289,7 @@ function configureOnceOff(config, options) {
|
||||
}
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
"Problem reading log4js config " + util.inspect(config) +
|
||||
"Problem reading log4js config " + util.inspect(config) +
|
||||
". Error was \"" + e.message + "\" (" + e.stack + ")"
|
||||
);
|
||||
}
|
||||
@ -298,7 +299,7 @@ function configureOnceOff(config, options) {
|
||||
function reloadConfiguration(options) {
|
||||
var mtime = getMTime(configState.filename);
|
||||
if (!mtime) return;
|
||||
|
||||
|
||||
if (configState.lastMTime && (mtime.getTime() > configState.lastMTime.getTime())) {
|
||||
configureOnceOff(loadConfigurationFile(configState.filename), options);
|
||||
}
|
||||
@ -329,7 +330,7 @@ function configure(configurationFileOrObject, options) {
|
||||
var config = configurationFileOrObject;
|
||||
config = config || process.env.LOG4JS_CONFIG;
|
||||
options = options || {};
|
||||
|
||||
|
||||
if (config === undefined || config === null || typeof(config) === 'string') {
|
||||
if (options.reloadSecs) {
|
||||
initReloadConfiguration(config, options);
|
||||
@ -455,19 +456,19 @@ module.exports = {
|
||||
getLogger: getLogger,
|
||||
getDefaultLogger: getDefaultLogger,
|
||||
hasLogger: hasLogger,
|
||||
|
||||
|
||||
addAppender: addAppender,
|
||||
loadAppender: loadAppender,
|
||||
clearAppenders: clearAppenders,
|
||||
configure: configure,
|
||||
shutdown: shutdown,
|
||||
|
||||
|
||||
replaceConsole: replaceConsole,
|
||||
restoreConsole: restoreConsole,
|
||||
|
||||
|
||||
levels: levels,
|
||||
setGlobalLogLevel: setGlobalLogLevel,
|
||||
|
||||
|
||||
layouts: layouts,
|
||||
appenders: {},
|
||||
appenderMakers: appenderMakers,
|
||||
@ -476,4 +477,3 @@ module.exports = {
|
||||
|
||||
//set ourselves up
|
||||
configure();
|
||||
|
||||
|
||||
@ -7,6 +7,10 @@ var BaseRollingFileStream = require('./BaseRollingFileStream')
|
||||
|
||||
module.exports = DateRollingFileStream;
|
||||
|
||||
function findTimestampFromFileIfExists(filename, now) {
|
||||
return fs.existsSync(filename) ? fs.statSync(filename).mtime : new Date(now());
|
||||
}
|
||||
|
||||
function DateRollingFileStream(filename, pattern, options, now) {
|
||||
debug("Now is " + now);
|
||||
if (pattern && typeof(pattern) === 'object') {
|
||||
@ -16,29 +20,26 @@ function DateRollingFileStream(filename, pattern, options, now) {
|
||||
}
|
||||
this.pattern = pattern || '.yyyy-MM-dd';
|
||||
this.now = now || Date.now;
|
||||
|
||||
if (fs.existsSync(filename)) {
|
||||
var stat = fs.statSync(filename);
|
||||
this.lastTimeWeWroteSomething = format.asString(this.pattern, stat.mtime);
|
||||
} else {
|
||||
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
|
||||
}
|
||||
this.lastTimeWeWroteSomething = format.asString(
|
||||
this.pattern,
|
||||
findTimestampFromFileIfExists(filename, this.now)
|
||||
);
|
||||
|
||||
this.baseFilename = filename;
|
||||
this.alwaysIncludePattern = false;
|
||||
|
||||
|
||||
if (options) {
|
||||
if (options.alwaysIncludePattern) {
|
||||
this.alwaysIncludePattern = true;
|
||||
filename = this.baseFilename + this.lastTimeWeWroteSomething;
|
||||
}
|
||||
delete options.alwaysIncludePattern;
|
||||
if (Object.keys(options).length === 0) {
|
||||
options = null;
|
||||
if (Object.keys(options).length === 0) {
|
||||
options = null;
|
||||
}
|
||||
}
|
||||
debug("this.now is " + this.now + ", now is " + now);
|
||||
|
||||
|
||||
DateRollingFileStream.super_.call(this, filename, options);
|
||||
}
|
||||
util.inherits(DateRollingFileStream, BaseRollingFileStream);
|
||||
@ -46,13 +47,13 @@ util.inherits(DateRollingFileStream, BaseRollingFileStream);
|
||||
DateRollingFileStream.prototype.shouldRoll = function() {
|
||||
var lastTime = this.lastTimeWeWroteSomething,
|
||||
thisTime = format.asString(this.pattern, new Date(this.now()));
|
||||
|
||||
debug("DateRollingFileStream.shouldRoll with now = " +
|
||||
|
||||
debug("DateRollingFileStream.shouldRoll with now = " +
|
||||
this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime);
|
||||
|
||||
|
||||
this.lastTimeWeWroteSomething = thisTime;
|
||||
this.previousTime = lastTime;
|
||||
|
||||
|
||||
return thisTime !== lastTime;
|
||||
};
|
||||
|
||||
|
||||
@@ -1,2 +1,3 @@
"use strict";
exports.RollingFileStream = require('./RollingFileStream');
exports.DateRollingFileStream = require('./DateRollingFileStream');

@@ -22,7 +22,7 @@
"node": ">=0.8"
},
"scripts": {
"test": "vows --spec"
"test": "vows"
},
"directories": {
"test": "test",

@ -14,18 +14,18 @@ vows.describe('log4js cluster appender').addBatch({
|
||||
var loggingEvents = [];
|
||||
var onChildProcessForked;
|
||||
var onMasterReceiveChildMessage;
|
||||
|
||||
// Fake cluster module, so no real cluster listeners be really added
|
||||
|
||||
// Fake cluster module, so no real cluster listeners be really added
|
||||
var fakeCluster = {
|
||||
|
||||
|
||||
on: function(event, callback) {
|
||||
registeredClusterEvents.push(event);
|
||||
onChildProcessForked = callback;
|
||||
},
|
||||
|
||||
|
||||
isMaster: true,
|
||||
isWorker: false,
|
||||
|
||||
|
||||
};
|
||||
var fakeWorker = {
|
||||
on: function(event, callback) {
|
||||
@ -36,18 +36,18 @@ vows.describe('log4js cluster appender').addBatch({
|
||||
},
|
||||
id: 'workerid'
|
||||
};
|
||||
|
||||
|
||||
var fakeActualAppender = function(loggingEvent) {
|
||||
loggingEvents.push(loggingEvent);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
// Load appender and fake modules in it
|
||||
var appenderModule = sandbox.require('../lib/appenders/clustered', {
|
||||
requires: {
|
||||
'cluster': fakeCluster,
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
var masterAppender = appenderModule.appender({
|
||||
actualAppenders: [fakeActualAppender, fakeActualAppender, fakeActualAppender],
|
||||
appenders: [{}, {category: "test"}, {category: "wovs"}]
|
||||
@ -59,7 +59,14 @@ vows.describe('log4js cluster appender').addBatch({
|
||||
// Simulate a 'fork' event to register the master's message handler on our fake worker.
|
||||
onChildProcessForked(fakeWorker);
|
||||
// Simulate a cluster message received by the masterAppender.
|
||||
var simulatedLoggingEvent = new LoggingEvent('wovs', 'Error', ['message deserialization test', {stack: 'my wrapped stack'}]);
|
||||
var simulatedLoggingEvent = new LoggingEvent(
|
||||
'wovs',
|
||||
'Error',
|
||||
[
|
||||
'message deserialization test',
|
||||
{stack: 'my wrapped stack'}
|
||||
]
|
||||
);
|
||||
onMasterReceiveChildMessage({
|
||||
type : '::log-message',
|
||||
event : JSON.stringify(simulatedLoggingEvent)
|
||||
@ -69,14 +76,14 @@ vows.describe('log4js cluster appender').addBatch({
|
||||
registeredClusterEvents: registeredClusterEvents,
|
||||
loggingEvents: loggingEvents,
|
||||
};
|
||||
|
||||
|
||||
return returnValue;
|
||||
},
|
||||
|
||||
"should register 'fork' event listener on 'cluster'": function(topic) {
|
||||
},
|
||||
|
||||
"should register 'fork' event listener on 'cluster'": function(topic) {
|
||||
assert.equal(topic.registeredClusterEvents[0], 'fork');
|
||||
},
|
||||
|
||||
|
||||
"should log using actual appender": function(topic) {
|
||||
assert.equal(topic.loggingEvents.length, 4);
|
||||
assert.equal(topic.loggingEvents[0].data[0], 'masterAppender test');
|
||||
@ -86,31 +93,31 @@ vows.describe('log4js cluster appender').addBatch({
|
||||
assert.equal(topic.loggingEvents[3].data[0], 'message deserialization test');
|
||||
assert.equal(topic.loggingEvents[3].data[1], 'my wrapped stack');
|
||||
},
|
||||
|
||||
|
||||
},
|
||||
|
||||
|
||||
'when in worker mode': {
|
||||
|
||||
|
||||
topic: function() {
|
||||
|
||||
|
||||
var registeredProcessEvents = [];
|
||||
|
||||
|
||||
// Fake cluster module, to fake we're inside a worker process
|
||||
var fakeCluster = {
|
||||
|
||||
|
||||
isMaster: false,
|
||||
isWorker: true,
|
||||
|
||||
|
||||
};
|
||||
|
||||
|
||||
var fakeProcess = {
|
||||
|
||||
|
||||
send: function(data) {
|
||||
registeredProcessEvents.push(data);
|
||||
},
|
||||
|
||||
|
||||
};
|
||||
|
||||
|
||||
// Load appender and fake modules in it
|
||||
var appenderModule = sandbox.require('../lib/appenders/clustered', {
|
||||
requires: {
|
||||
@ -120,34 +127,40 @@ vows.describe('log4js cluster appender').addBatch({
|
||||
'process': fakeProcess,
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
var workerAppender = appenderModule.appender();
|
||||
|
||||
// Actual test - log message using masterAppender
|
||||
workerAppender(new LoggingEvent('wovs', 'Info', ['workerAppender test']));
|
||||
workerAppender(new LoggingEvent('wovs', 'Info', [new Error('Error test')]));
|
||||
|
||||
|
||||
var returnValue = {
|
||||
registeredProcessEvents: registeredProcessEvents,
|
||||
};
|
||||
|
||||
|
||||
return returnValue;
|
||||
|
||||
|
||||
},
|
||||
|
||||
|
||||
"worker appender should call process.send" : function(topic) {
|
||||
assert.equal(topic.registeredProcessEvents[0].type, '::log-message');
|
||||
assert.equal(JSON.parse(topic.registeredProcessEvents[0].event).data[0], "workerAppender test");
|
||||
assert.equal(
|
||||
JSON.parse(topic.registeredProcessEvents[0].event).data[0],
|
||||
"workerAppender test"
|
||||
);
|
||||
},
|
||||
|
||||
|
||||
"worker should serialize an Error correctly" : function(topic) {
|
||||
assert.equal(topic.registeredProcessEvents[1].type, '::log-message');
|
||||
assert(JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack);
|
||||
var actual = JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack;
|
||||
var expectedRegex = /^Error: Error test/;
|
||||
assert(actual.match(expectedRegex), "Expected: \n\n " + actual + "\n\n to match " + expectedRegex);
|
||||
assert(
|
||||
actual.match(expectedRegex),
|
||||
"Expected: \n\n " + actual + "\n\n to match " + expectedRegex
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
}).exportTo(module);
|
||||
|
||||
@ -253,9 +253,13 @@ vows.describe('log4js connect logger').addBatch({
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url :custom_string', tokens: [{
|
||||
token: ':custom_string', replacement: 'fooBAR'
|
||||
}] } );
|
||||
var cl = clm.connectLogger(ml, {
|
||||
level: levels.INFO,
|
||||
format: ':method :url :custom_string',
|
||||
tokens: [{
|
||||
token: ':custom_string', replacement: 'fooBAR'
|
||||
}]
|
||||
});
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
@ -275,9 +279,13 @@ vows.describe('log4js connect logger').addBatch({
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url :date', tokens: [{
|
||||
token: ':date', replacement: "20150310"
|
||||
}] } );
|
||||
var cl = clm.connectLogger(ml, {
|
||||
level: levels.INFO,
|
||||
format: ':method :url :date',
|
||||
tokens: [{
|
||||
token: ':date', replacement: "20150310"
|
||||
}]
|
||||
});
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
|
||||
@ -24,15 +24,15 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
var listenersCount = process.listeners('exit').length
|
||||
, logger = log4js.getLogger('default-settings')
|
||||
, count = 5, logfile;
|
||||
|
||||
|
||||
while (count--) {
|
||||
logfile = path.join(__dirname, '/fa-default-test' + count + '.log');
|
||||
log4js.addAppender(require('../lib/appenders/file').appender(logfile), 'default-settings');
|
||||
}
|
||||
|
||||
|
||||
return listenersCount;
|
||||
},
|
||||
|
||||
|
||||
'does not add more than one `exit` listeners': function (initialCount) {
|
||||
assert.ok(process.listeners('exit').length <= initialCount + 1);
|
||||
}
|
||||
@ -56,7 +56,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
'../streams': {
|
||||
RollingFileStream: function(filename) {
|
||||
openedFiles.push(filename);
|
||||
|
||||
|
||||
this.end = function() {
|
||||
openedFiles.shift();
|
||||
};
|
||||
@ -64,7 +64,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
this.on = function() {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
for (var i=0; i < 5; i += 1) {
|
||||
@ -78,7 +78,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
assert.isEmpty(openedFiles);
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
'with default fileAppender settings': {
|
||||
topic: function() {
|
||||
var that = this
|
||||
@ -88,9 +88,9 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings');
|
||||
|
||||
|
||||
logger.info("This should be in the file.");
|
||||
|
||||
|
||||
setTimeout(function() {
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
}, 100);
|
||||
@ -100,7 +100,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
},
|
||||
'log messages should be in the basic layout format': function(err, fileContents) {
|
||||
assert.match(
|
||||
fileContents,
|
||||
fileContents,
|
||||
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
|
||||
);
|
||||
}
|
||||
@ -112,59 +112,78 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
log4js.clearAppenders();
|
||||
|
||||
function addAppender(cat) {
|
||||
var testFile = path.join(__dirname, '/fa-subcategories-test-'+cat.join('-').replace(/\./g, "_")+'.log');
|
||||
var testFile = path.join(
|
||||
__dirname,
|
||||
'/fa-subcategories-test-'+cat.join('-').replace(/\./g, "_")+'.log'
|
||||
);
|
||||
remove(testFile);
|
||||
log4js.addAppender(require('../lib/appenders/file').appender(testFile), cat);
|
||||
return testFile;
|
||||
}
|
||||
|
||||
var file_sub1 = addAppender([ 'sub1']);
|
||||
|
||||
|
||||
var file_sub1_sub12$sub1_sub13 = addAppender([ 'sub1.sub12', 'sub1.sub13' ]);
|
||||
|
||||
|
||||
var file_sub1_sub12 = addAppender([ 'sub1.sub12' ]);
|
||||
|
||||
|
||||
|
||||
var logger_sub1_sub12_sub123 = log4js.getLogger('sub1.sub12.sub123');
|
||||
|
||||
|
||||
var logger_sub1_sub13_sub133 = log4js.getLogger('sub1.sub13.sub133');
|
||||
|
||||
var logger_sub1_sub14 = log4js.getLogger('sub1.sub14');
|
||||
|
||||
var logger_sub2 = log4js.getLogger('sub2');
|
||||
|
||||
|
||||
|
||||
logger_sub1_sub12_sub123.info('sub1_sub12_sub123');
|
||||
|
||||
|
||||
logger_sub1_sub13_sub133.info('sub1_sub13_sub133');
|
||||
|
||||
logger_sub1_sub14.info('sub1_sub14');
|
||||
|
||||
logger_sub2.info('sub2');
|
||||
|
||||
|
||||
|
||||
|
||||
setTimeout(function() {
|
||||
that.callback(null, {
|
||||
file_sub1: fs.readFileSync(file_sub1).toString(),
|
||||
file_sub1_sub12$sub1_sub13: fs.readFileSync(file_sub1_sub12$sub1_sub13).toString(),
|
||||
file_sub1_sub12: fs.readFileSync(file_sub1_sub12).toString()
|
||||
});
|
||||
});
|
||||
}, 3000);
|
||||
},
|
||||
'check file contents': function (err, fileContents) {
|
||||
|
||||
// everything but category 'sub2'
|
||||
assert.match(fileContents.file_sub1, /^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133|sub1.sub14 - sub1_sub14)[\s\S]){3}$/);
|
||||
assert.ok(fileContents.file_sub1.match(/sub123/) && fileContents.file_sub1.match(/sub133/) && fileContents.file_sub1.match(/sub14/));
|
||||
assert.match(
|
||||
fileContents.file_sub1,
|
||||
/^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133|sub1.sub14 - sub1_sub14)[\s\S]){3}$/
|
||||
);
|
||||
assert.ok(
|
||||
fileContents.file_sub1.match(/sub123/) &&
|
||||
fileContents.file_sub1.match(/sub133/) &&
|
||||
fileContents.file_sub1.match(/sub14/)
|
||||
);
|
||||
assert.ok(!fileContents.file_sub1.match(/sub2/));
|
||||
|
||||
// only catgories starting with 'sub1.sub12' and 'sub1.sub13'
|
||||
assert.match(fileContents.file_sub1_sub12$sub1_sub13, /^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133)[\s\S]){2}$/);
|
||||
assert.ok(fileContents.file_sub1_sub12$sub1_sub13.match(/sub123/) && fileContents.file_sub1_sub12$sub1_sub13.match(/sub133/));
|
||||
assert.match(
|
||||
fileContents.file_sub1_sub12$sub1_sub13,
|
||||
/^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133)[\s\S]){2}$/
|
||||
);
|
||||
assert.ok(
|
||||
fileContents.file_sub1_sub12$sub1_sub13.match(/sub123/) &&
|
||||
fileContents.file_sub1_sub12$sub1_sub13.match(/sub133/)
|
||||
);
|
||||
assert.ok(!fileContents.file_sub1_sub12$sub1_sub13.match(/sub14|sub2/));
|
||||
|
||||
// only catgories starting with 'sub1.sub12'
|
||||
assert.match(fileContents.file_sub1_sub12, /^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123)[\s\S]){1}$/);
|
||||
assert.match(
|
||||
fileContents.file_sub1_sub12,
|
||||
/^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123)[\s\S]){1}$/
|
||||
);
|
||||
assert.ok(!fileContents.file_sub1_sub12.match(/sub14|sub2|sub13/));
|
||||
|
||||
}
|
||||
@ -179,7 +198,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
//log file of 100 bytes maximum, no backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
|
||||
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
|
||||
'max-file-size'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
@ -214,11 +233,11 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
remove(testFile);
|
||||
remove(testFile+'.1');
|
||||
remove(testFile+'.2');
|
||||
|
||||
|
||||
//log file of 50 bytes maximum, 2 backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
|
||||
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
|
||||
'max-file-size-backups'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
@ -228,11 +247,11 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
var that = this;
|
||||
//give the system a chance to open the stream
|
||||
setTimeout(function() {
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
}
|
||||
});
|
||||
}, 200);
|
||||
@ -249,8 +268,8 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
},
|
||||
'should be named in sequence': function (files) {
|
||||
assert.deepEqual(files, [
|
||||
'fa-maxFileSize-with-backups-test.log',
|
||||
'fa-maxFileSize-with-backups-test.log.1',
|
||||
'fa-maxFileSize-with-backups-test.log',
|
||||
'fa-maxFileSize-with-backups-test.log.1',
|
||||
'fa-maxFileSize-with-backups-test.log.2'
|
||||
]);
|
||||
},
|
||||
@ -287,11 +306,13 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
remove(testFile);
|
||||
remove(testFile+'.1.gz');
|
||||
remove(testFile+'.2.gz');
|
||||
|
||||
|
||||
//log file of 50 bytes maximum, 2 backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2, true),
|
||||
require('../lib/appenders/file').appender(
|
||||
testFile, log4js.layouts.basicLayout, 50, 2, true
|
||||
),
|
||||
'max-file-size-backups'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
@ -301,11 +322,11 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
var that = this;
|
||||
//give the system a chance to open the stream
|
||||
setTimeout(function() {
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
}
|
||||
});
|
||||
}, 1000);
|
||||
@ -313,7 +334,9 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
'the log files': {
|
||||
topic: function(files) {
|
||||
var logFiles = files.filter(
|
||||
function(file) { return file.indexOf('fa-maxFileSize-with-backups-compressed-test.log') > -1; }
|
||||
function(file) {
|
||||
return file.indexOf('fa-maxFileSize-with-backups-compressed-test.log') > -1;
|
||||
}
|
||||
);
|
||||
return logFiles;
|
||||
},
|
||||
@ -322,8 +345,8 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
},
|
||||
'should be named in sequence': function (files) {
|
||||
assert.deepEqual(files, [
|
||||
'fa-maxFileSize-with-backups-compressed-test.log',
|
||||
'fa-maxFileSize-with-backups-compressed-test.log.1.gz',
|
||||
'fa-maxFileSize-with-backups-compressed-test.log',
|
||||
'fa-maxFileSize-with-backups-compressed-test.log.1.gz',
|
||||
'fa-maxFileSize-with-backups-compressed-test.log.2.gz'
|
||||
]);
|
||||
},
|
||||
@ -365,7 +388,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
logger = log4js.getLogger('tests');
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
|
||||
|
||||
fs.readFile('tmp-tests.log', 'utf8', this.callback);
|
||||
},
|
||||
'should load appender configuration from a json file': function (err, contents) {
|
||||
@ -392,7 +415,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
requires: {
|
||||
'../streams': {
|
||||
RollingFileStream: function(filename) {
|
||||
|
||||
|
||||
this.end = function() {};
|
||||
this.on = function(evt, cb) {
|
||||
if (evt === 'error') {
|
||||
@ -401,7 +424,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
fileAppender.appender('test1.log', null, 100);
|
||||
|
||||
@ -26,10 +26,13 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
remove(testFile);
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(require('../lib/appenders/fileSync').appender(testFile), 'default-settings');
|
||||
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/fileSync').appender(testFile),
|
||||
'default-settings'
|
||||
);
|
||||
|
||||
logger.info("This should be in the file.");
|
||||
|
||||
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
},
|
||||
'should write log messages to the file': function (err, fileContents) {
|
||||
@ -37,7 +40,7 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
},
|
||||
'log messages should be in the basic layout format': function(err, fileContents) {
|
||||
assert.match(
|
||||
fileContents,
|
||||
fileContents,
|
||||
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
|
||||
);
|
||||
}
|
||||
@ -52,7 +55,7 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
//log file of 100 bytes maximum, no backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 100, 0),
|
||||
require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 100, 0),
|
||||
'max-file-size'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
@ -85,11 +88,11 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
remove(testFile);
|
||||
remove(testFile+'.1');
|
||||
remove(testFile+'.2');
|
||||
|
||||
|
||||
//log file of 50 bytes maximum, 2 backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 50, 2),
|
||||
require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 50, 2),
|
||||
'max-file-size-backups'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
@ -97,12 +100,12 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
logger.info("This is the third log message.");
|
||||
logger.info("This is the fourth log message.");
|
||||
var that = this;
|
||||
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
}
|
||||
});
|
||||
},
|
||||
@ -118,8 +121,8 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
},
|
||||
'should be named in sequence': function (files) {
|
||||
assert.deepEqual(files, [
|
||||
'fa-maxFileSize-with-backups-sync-test.log',
|
||||
'fa-maxFileSize-with-backups-sync-test.log.1',
|
||||
'fa-maxFileSize-with-backups-sync-test.log',
|
||||
'fa-maxFileSize-with-backups-sync-test.log.1',
|
||||
'fa-maxFileSize-with-backups-sync-test.log.2'
|
||||
]);
|
||||
},
|
||||
@ -158,19 +161,19 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
//this config defines one file appender (to ./tmp-sync-tests.log)
|
||||
//and sets the log level for "tests" to WARN
|
||||
log4js.configure({
|
||||
appenders: [{
|
||||
category: "tests",
|
||||
type: "file",
|
||||
filename: "tmp-sync-tests.log",
|
||||
layout: { type: "messagePassThrough" }
|
||||
appenders: [{
|
||||
category: "tests",
|
||||
type: "file",
|
||||
filename: "tmp-sync-tests.log",
|
||||
layout: { type: "messagePassThrough" }
|
||||
}],
|
||||
|
||||
|
||||
levels: { tests: "WARN" }
|
||||
});
|
||||
logger = log4js.getLogger('tests');
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
|
||||
|
||||
fs.readFile('tmp-sync-tests.log', 'utf8', this.callback);
|
||||
},
|
||||
'should load appender configuration from a json file': function(err, contents) {
|
||||
|
||||
@ -28,7 +28,7 @@ function setupLogging(category, options) {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return constructor(key);
|
||||
@ -179,4 +179,3 @@ vows.describe('log4js hipchatAppender').addBatch({
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
|
||||
|
||||
@ -50,13 +50,13 @@ vows.describe('levels').addBatch({
    topic: levels.ALL,
    'should be less than the other levels': function(all) {
      assertThat(all).isLessThanOrEqualTo(
        [
          levels.ALL,
          levels.TRACE,
          levels.DEBUG,
          levels.INFO,
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
          levels.MARK,
          levels.OFF

@ -66,12 +66,12 @@ vows.describe('levels').addBatch({
    'should be greater than no levels': function(all) {
      assertThat(all).isNotGreaterThanOrEqualTo(
        [
          levels.TRACE,
          levels.DEBUG,
          levels.INFO,
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
          levels.MARK,
          levels.OFF
        ]

@ -81,12 +81,12 @@ vows.describe('levels').addBatch({
      assertThat(all).isEqualTo([levels.toLevel("ALL")]);
      assertThat(all).isNotEqualTo(
        [
          levels.TRACE,
          levels.DEBUG,
          levels.INFO,
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
          levels.MARK,
          levels.OFF
        ]

@ -98,11 +98,11 @@ vows.describe('levels').addBatch({
    'should be less than DEBUG': function(trace) {
      assertThat(trace).isLessThanOrEqualTo(
        [
          levels.DEBUG,
          levels.INFO,
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
          levels.MARK,
          levels.OFF
        ]

@ -113,11 +113,11 @@ vows.describe('levels').addBatch({
      assertThat(trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
      assertThat(trace).isNotGreaterThanOrEqualTo(
        [
          levels.DEBUG,
          levels.INFO,
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
          levels.MARK,
          levels.OFF
        ]

@ -127,12 +127,12 @@ vows.describe('levels').addBatch({
      assertThat(trace).isEqualTo([levels.toLevel("TRACE")]);
      assertThat(trace).isNotEqualTo(
        [
          levels.ALL,
          levels.DEBUG,
          levels.INFO,
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
          levels.MARK,
          levels.OFF
        ]
@ -144,10 +144,10 @@ vows.describe('levels').addBatch({
    'should be less than INFO': function(debug) {
      assertThat(debug).isLessThanOrEqualTo(
        [
          levels.INFO,
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
          levels.MARK,
          levels.OFF
        ]

@ -158,10 +158,10 @@ vows.describe('levels').addBatch({
      assertThat(debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
      assertThat(debug).isNotGreaterThanOrEqualTo(
        [
          levels.INFO,
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
          levels.MARK,
          levels.OFF
        ]

@ -171,12 +171,12 @@ vows.describe('levels').addBatch({
      assertThat(trace).isEqualTo([levels.toLevel("DEBUG")]);
      assertThat(trace).isNotEqualTo(
        [
          levels.ALL,
          levels.TRACE,
          levels.INFO,
          levels.WARN,
          levels.ERROR,
          levels.FATAL,
          levels.MARK,
          levels.OFF
        ]

@ -187,9 +187,9 @@ vows.describe('levels').addBatch({
    topic: levels.INFO,
    'should be less than WARN': function(info) {
      assertThat(info).isLessThanOrEqualTo([
        levels.WARN,
        levels.ERROR,
        levels.FATAL,
        levels.MARK,
        levels.OFF
      ]);

@ -198,9 +198,9 @@ vows.describe('levels').addBatch({
    'should be greater than DEBUG': function(info) {
      assertThat(info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
      assertThat(info).isNotGreaterThanOrEqualTo([
        levels.WARN,
        levels.ERROR,
        levels.FATAL,
        levels.MARK,
        levels.OFF
      ]);

@ -208,12 +208,12 @@ vows.describe('levels').addBatch({
    'should only be equal to INFO': function(trace) {
      assertThat(trace).isEqualTo([levels.toLevel("INFO")]);
      assertThat(trace).isNotEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.WARN,
        levels.ERROR,
        levels.FATAL,
        levels.MARK,
        levels.OFF
      ]);

@ -224,30 +224,32 @@ vows.describe('levels').addBatch({
    'should be less than ERROR': function(warn) {
      assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.MARK, levels.OFF]);
      assertThat(warn).isNotLessThanOrEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO
      ]);
    },
    'should be greater than INFO': function(warn) {
      assertThat(warn).isGreaterThanOrEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO
      ]);
      assertThat(warn).isNotGreaterThanOrEqualTo([
        levels.ERROR, levels.FATAL, levels.MARK, levels.OFF
      ]);
    },
    'should only be equal to WARN': function(trace) {
      assertThat(trace).isEqualTo([levels.toLevel("WARN")]);
      assertThat(trace).isNotEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.ERROR,
        levels.FATAL,
        levels.OFF
      ]);
    }
@ -257,19 +259,19 @@ vows.describe('levels').addBatch({
    'should be less than FATAL': function(error) {
      assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
      assertThat(error).isNotLessThanOrEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.WARN
      ]);
    },
    'should be greater than WARN': function(error) {
      assertThat(error).isGreaterThanOrEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.WARN
      ]);
      assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);

@ -277,12 +279,12 @@ vows.describe('levels').addBatch({
    'should only be equal to ERROR': function(trace) {
      assertThat(trace).isEqualTo([levels.toLevel("ERROR")]);
      assertThat(trace).isNotEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.WARN,
        levels.FATAL,
        levels.MARK,
        levels.OFF
      ]);

@ -293,21 +295,21 @@ vows.describe('levels').addBatch({
    'should be less than OFF': function(fatal) {
      assertThat(fatal).isLessThanOrEqualTo([levels.MARK, levels.OFF]);
      assertThat(fatal).isNotLessThanOrEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.WARN,
        levels.ERROR
      ]);
    },
    'should be greater than ERROR': function(fatal) {
      assertThat(fatal).isGreaterThanOrEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.WARN,
        levels.ERROR
      ]);
      assertThat(fatal).isNotGreaterThanOrEqualTo([levels.MARK, levels.OFF]);

@ -315,13 +317,13 @@ vows.describe('levels').addBatch({
    'should only be equal to FATAL': function(fatal) {
      assertThat(fatal).isEqualTo([levels.toLevel("FATAL")]);
      assertThat(fatal).isNotEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.WARN,
        levels.ERROR,
        levels.MARK,
        levels.OFF
      ]);
    }

@ -331,22 +333,22 @@ vows.describe('levels').addBatch({
    'should be less than OFF': function(mark) {
      assertThat(mark).isLessThanOrEqualTo([levels.OFF]);
      assertThat(mark).isNotLessThanOrEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.WARN,
        levels.FATAL,
        levels.ERROR
      ]);
    },
    'should be greater than FATAL': function(mark) {
      assertThat(mark).isGreaterThanOrEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.WARN,
        levels.ERROR,
        levels.FATAL
      ]);

@ -355,13 +357,13 @@ vows.describe('levels').addBatch({
    'should only be equal to MARK': function(mark) {
      assertThat(mark).isEqualTo([levels.toLevel("MARK")]);
      assertThat(mark).isNotEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.WARN,
        levels.ERROR,
        levels.FATAL,
        levels.OFF
      ]);
    }

@ -370,24 +372,24 @@ vows.describe('levels').addBatch({
    topic: levels.OFF,
    'should not be less than anything': function(off) {
      assertThat(off).isNotLessThanOrEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.WARN,
        levels.ERROR,
        levels.FATAL,
        levels.MARK
      ]);
    },
    'should be greater than everything': function(off) {
      assertThat(off).isGreaterThanOrEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.WARN,
        levels.ERROR,
        levels.FATAL,
        levels.MARK
      ]);

@ -395,12 +397,12 @@ vows.describe('levels').addBatch({
    'should only be equal to OFF': function(off) {
      assertThat(off).isEqualTo([levels.toLevel("OFF")]);
      assertThat(off).isNotEqualTo([
        levels.ALL,
        levels.TRACE,
        levels.DEBUG,
        levels.INFO,
        levels.WARN,
        levels.ERROR,
        levels.FATAL,
        levels.MARK
      ]);
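All of the assertions above reduce to the comparison helpers on Level objects. A small sketch of those calls outside the test harness (assuming levels is exposed as log4js.levels, as in the 0.6.x tree):

// minimal sketch of the Level comparison methods the assertions above exercise
var log4js = require('log4js');
var levels = log4js.levels;

console.log(levels.ALL.isLessThanOrEqualTo(levels.TRACE));    // true: ALL sorts below every other level
console.log(levels.OFF.isGreaterThanOrEqualTo(levels.FATAL)); // true: OFF sorts above every other level
console.log(levels.WARN.isEqualTo(levels.toLevel("WARN")));   // true: toLevel resolves a name to a Level object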
@ -7,7 +7,7 @@ function setupConsoleTest() {
  var fakeConsole = {}
  , logEvents = []
  , log4js;

  ['trace','debug','log','info','warn','error'].forEach(function(fn) {
    fakeConsole[fn] = function() {
      throw new Error("this should not be called.");

@ -15,7 +15,7 @@ function setupConsoleTest() {
  });

  log4js = sandbox.require(
    '../lib/log4js',
    {
      globals: {
        console: fakeConsole

@ -64,7 +64,10 @@ vows.describe('log4js').addBatch({
      logger.trace("Trace event 2");
      logger.warn("Warning event");
      logger.error("Aargh!", new Error("Pants are on fire!"));
      logger.error(
        "Simulated CouchDB problem",
        { err: 127, cause: "incendiary underwear" }
      );
      return events;
    },

@ -86,7 +89,10 @@ vows.describe('log4js').addBatch({
      logger.trace("Trace event 2");
      logger.warn("Warning event");
      logger.error("Aargh!", new Error("Pants are on fire!"));
      logger.error(
        "Simulated CouchDB problem",
        { err: 127, cause: "incendiary underwear" }
      );
      logger.flush();
      return events;
    },

@ -95,9 +101,9 @@ vows.describe('log4js').addBatch({
      assert.equal(events.length, 6);
    }
  }
},

'getLogger': {
  topic: function() {
    var log4js = require('../lib/log4js');

@ -106,7 +112,7 @@ vows.describe('log4js').addBatch({
    logger.setLevel("DEBUG");
    return logger;
  },

  'should take a category and return a logger': function(logger) {
    assert.equal(logger.category, 'tests');
    assert.equal(logger.level.toString(), "DEBUG");

@ -129,18 +135,18 @@ vows.describe('log4js').addBatch({
      logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" });
      return events;
    },

    'should emit log events': function(events) {
      assert.equal(events[0].level.toString(), 'DEBUG');
      assert.equal(events[0].data[0], 'Debug event');
      assert.instanceOf(events[0].startTime, Date);
    },

    'should not emit events of a lower level': function(events) {
      assert.equal(events.length, 4);
      assert.equal(events[1].level.toString(), 'WARN');
    },

    'should include the error if passed in': function(events) {
      assert.instanceOf(events[2].data[1], Error);
      assert.equal(events[2].data[1].message, 'Pants are on fire!');

@ -201,7 +207,7 @@ vows.describe('log4js').addBatch({
      assert.ok(events.shutdownCallbackCalled);
    }
  },

  'invalid configuration': {
    'should throw an exception': function() {
      assert.throws(function() {
@ -209,15 +215,15 @@ vows.describe('log4js').addBatch({
        });
      }
    },

    'configuration when passed as object': {
      topic: function() {
        var appenderConfig,
          log4js = sandbox.require(
            '../lib/log4js',
            {
              requires: {
                './appenders/file':
                {
                  name: "file",
                  appender: function() {},

@ -228,8 +234,8 @@ vows.describe('log4js').addBatch({
                }
              }
            }
          ),
          config = { appenders:
            [ { "type" : "file",
                "filename" : "cheesy-wotsits.log",
                "maxLogSize" : 1024,

@ -248,10 +254,10 @@ vows.describe('log4js').addBatch({
    'configuration that causes an error': {
      topic: function() {
        var log4js = sandbox.require(
            '../lib/log4js',
            {
              requires: {
                './appenders/file':
                {
                  name: "file",
                  appender: function() {},

@ -261,8 +267,8 @@ vows.describe('log4js').addBatch({
                }
              }
            }
          ),
          config = { appenders:
            [ { "type" : "file",
                "filename" : "cheesy-wotsits.log",
                "maxLogSize" : 1024,

@ -274,7 +280,7 @@ vows.describe('log4js').addBatch({
          log4js.configure(config);
        } catch (e) {
          return e;
        }
      }
    },
    'should wrap error in a meaningful message': function(e) {
      assert.ok(e.message.indexOf('log4js configuration problem for') > -1);

@ -283,17 +289,17 @@ vows.describe('log4js').addBatch({
    'configuration when passed as filename': {
      topic: function() {
        var appenderConfig,
          configFilename,
          log4js = sandbox.require(
            '../lib/log4js',
            { requires:
              { 'fs':
                { statSync:
                  function() {
                    return { mtime: Date.now() };
                  },
                  readFileSync:
                  function(filename) {
                    configFilename = filename;
                    return JSON.stringify({

@ -304,14 +310,14 @@ vows.describe('log4js').addBatch({
                      ]
                    });
                  },
                  readdirSync:
                  function() {
                    return ['file'];
                  }
                },
                './appenders/file':
                { name: "file",
                  appender: function() {},
                  configure: function(configuration) {
                    appenderConfig = configuration;
                    return function() {};
@ -333,21 +339,21 @@ vows.describe('log4js').addBatch({
  'with no appenders defined' : {
    topic: function() {
      var logger,
        that = this,
        fakeConsoleAppender = {
          name: "console",
          appender: function() {
            return function(evt) {
              that.callback(null, evt);
            };
          },
          configure: function() {
            return fakeConsoleAppender.appender();
          }
        },
        log4js = sandbox.require(
          '../lib/log4js',
          {
            requires: {
              './appenders/console': fakeConsoleAppender

@ -370,8 +376,8 @@ vows.describe('log4js').addBatch({
  },
  'without a category': {
    'should register the function as a listener for all loggers': function (log4js) {
      var appenderEvent,
        appender = function(evt) { appenderEvent = evt; },
        logger = log4js.getLogger("tests");

      log4js.addAppender(appender);

@ -382,19 +388,19 @@ vows.describe('log4js').addBatch({
  },
  'if an appender for a category is defined': {
    'should register for that category': function (log4js) {
      var otherEvent,
        appenderEvent,
        cheeseLogger;

      log4js.addAppender(function (evt) { appenderEvent = evt; });
      log4js.addAppender(function (evt) { otherEvent = evt; }, 'cheese');

      cheeseLogger = log4js.getLogger('cheese');
      cheeseLogger.debug('This is a test');
      assert.deepEqual(appenderEvent, otherEvent);
      assert.equal(otherEvent.data[0], 'This is a test');
      assert.equal(otherEvent.categoryName, 'cheese');

      otherEvent = undefined;
      appenderEvent = undefined;
      log4js.getLogger('pants').debug("this should not be propagated to otherEvent");

@ -403,58 +409,58 @@ vows.describe('log4js').addBatch({
    }
  }
},

'with a category': {
  'should only register the function as a listener for that category': function(log4js) {
    var appenderEvent,
      appender = function(evt) { appenderEvent = evt; },
      logger = log4js.getLogger("tests");

    log4js.addAppender(appender, 'tests');
    logger.debug('this is a category test');
    assert.equal(appenderEvent.data[0], 'this is a category test');

    appenderEvent = undefined;
    log4js.getLogger('some other category').debug('Cheese');
    assert.isUndefined(appenderEvent);
  }
},

'with multiple categories': {
  'should register the function as a listener for all the categories': function(log4js) {
    var appenderEvent,
      appender = function(evt) { appenderEvent = evt; },
      logger = log4js.getLogger('tests');

    log4js.addAppender(appender, 'tests', 'biscuits');

    logger.debug('this is a test');
    assert.equal(appenderEvent.data[0], 'this is a test');
    appenderEvent = undefined;

    var otherLogger = log4js.getLogger('biscuits');
    otherLogger.debug("mmm... garibaldis");
    assert.equal(appenderEvent.data[0], "mmm... garibaldis");

    appenderEvent = undefined;

    log4js.getLogger("something else").debug("pants");
    assert.isUndefined(appenderEvent);
  },
  'should register the function when the list of categories is an array': function(log4js) {
    var appenderEvent,
      appender = function(evt) { appenderEvent = evt; };

    log4js.addAppender(appender, ['tests', 'pants']);

    log4js.getLogger('tests').debug('this is a test');
    assert.equal(appenderEvent.data[0], 'this is a test');

    appenderEvent = undefined;

    log4js.getLogger('pants').debug("big pants");
    assert.equal(appenderEvent.data[0], "big pants");

    appenderEvent = undefined;

    log4js.getLogger("something else").debug("pants");
@ -462,17 +468,17 @@ vows.describe('log4js').addBatch({
      }
    }
  },

  'default setup': {
    topic: function() {
      var appenderEvents = [],
        fakeConsole = {
          'name': 'console',
          'appender': function () {
            return function(evt) {
              appenderEvents.push(evt);
            };
          },
          'configure': function (config) {
            return fakeConsole.appender();
          }

@ -492,43 +498,43 @@ vows.describe('log4js').addBatch({
          }
        ),
        logger = log4js.getLogger('a-test');

      logger.debug("this is a test");
      globalConsole.log("this should not be logged");

      return appenderEvents;
    },

    'should configure a console appender': function(appenderEvents) {
      assert.equal(appenderEvents[0].data[0], 'this is a test');
    },

    'should not replace console.log with log4js version': function(appenderEvents) {
      assert.equal(appenderEvents.length, 1);
    }
  },

  'console' : {
    topic: setupConsoleTest,

    'when replaceConsole called': {
      topic: function(test) {
        test.log4js.replaceConsole();

        test.fakeConsole.log("Some debug message someone put in a module");
        test.fakeConsole.debug("Some debug");
        test.fakeConsole.error("An error");
        test.fakeConsole.info("some info");
        test.fakeConsole.warn("a warning");

        test.fakeConsole.log("cheese (%s) and biscuits (%s)", "gouda", "garibaldis");
        test.fakeConsole.log({ lumpy: "tapioca" });
        test.fakeConsole.log("count %d", 123);
        test.fakeConsole.log("stringify %j", { lumpy: "tapioca" });

        return test.logEvents;
      },

      'should replace console.log methods with log4js ones': function(logEvents) {
        assert.equal(logEvents.length, 9);
        assert.equal(logEvents[0].data[0], "Some debug message someone put in a module");

@ -590,7 +596,7 @@ vows.describe('log4js').addBatch({
        test.fakeConsole.debug("Some debug");
        return test.logEvents;
      },

      'should allow for turning on console replacement': function (logEvents) {
        assert.equal(logEvents.length, 1);
        assert.equal(logEvents[0].level.toString(), "DEBUG");

@ -603,13 +609,13 @@ vows.describe('log4js').addBatch({
      var logEvent,
        firstLog4js = require('../lib/log4js'),
        secondLog4js;

      firstLog4js.clearAppenders();
      firstLog4js.addAppender(function(evt) { logEvent = evt; });

      secondLog4js = require('../lib/log4js');
      secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js");

      return logEvent;
    },
    'should maintain appenders between requires': function (logEvent) {
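The replaceConsole hunks above drive the console-capture feature; outside the sandboxed test it behaves roughly like this (a sketch, assuming the 0.6.x replaceConsole/restoreConsole API):

// minimal sketch: route console.* calls through a log4js logger, then undo it
var log4js = require('log4js');
var logger = log4js.getLogger('console');

log4js.replaceConsole(logger);   // console.log/debug/info/warn/error now emit log events
console.log("count %d", 123);    // formatted much like the util.format-style calls in the test
log4js.restoreConsole();         // put the original console methods back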
@ -66,7 +66,7 @@ vows.describe('Multiprocess Appender').addBatch({
        }
      }
    ).appender({ mode: 'worker', loggerPort: 1234, loggerHost: 'pants' });

    //don't need a proper log event for the worker tests
    appender('before connect');
    fakeNet.cbs.connect();

@ -76,7 +76,7 @@ vows.describe('Multiprocess Appender').addBatch({
    fakeNet.cbs.connect();
    appender('after error, after connect');
    appender(new Error('Error test'));

    return fakeNet;
  },
  'should open a socket to the loggerPort and loggerHost': function(net) {

@ -101,10 +101,16 @@ vows.describe('Multiprocess Appender').addBatch({
    assert.equal(net.createConnectionCalled, 2);
  },
  'should serialize an Error correctly': function(net) {
    assert(
      JSON.parse(net.data[8]).stack,
      "Expected:\n\n" + net.data[8] + "\n\n to have a 'stack' property"
    );
    var actual = JSON.parse(net.data[8]).stack;
    var expectedRegex = /^Error: Error test/;
    assert(
      actual.match(expectedRegex),
      "Expected: \n\n " + actual + "\n\n to match " + expectedRegex
    );
  }
},

@ -119,7 +125,7 @@ vows.describe('Multiprocess Appender').addBatch({
        }
      }
    ).appender({ mode: 'worker' });

    //don't need a proper log event for the worker tests
    appender('before connect');
    fakeNet.cbs.connect();

@ -130,7 +136,7 @@ vows.describe('Multiprocess Appender').addBatch({
    appender('after close, before connect');
    fakeNet.cbs.connect();
    appender('after close, after connect');

    return fakeNet;
  },
  'should attempt to re-open the socket': function(net) {

@ -154,7 +160,7 @@ vows.describe('Multiprocess Appender').addBatch({
        }
      }
    ).appender({ mode: 'worker' });

    return fakeNet;
  },
  'should open a socket to localhost:5000': function(net) {

@ -177,9 +183,9 @@ vows.describe('Multiprocess Appender').addBatch({
      loggerPort: 1234,
      actualAppender: fakeNet.fakeAppender.bind(fakeNet)
    });

    appender('this should be sent to the actual appender directly');

    return fakeNet;
  },
  'should listen for log messages on loggerPort and loggerHost': function(net) {

@ -195,7 +201,7 @@ vows.describe('Multiprocess Appender').addBatch({
      { level: { level: 10000, levelStr: 'DEBUG' }
      , data: ['some debug']}
    ) + '__LOG4JS__';

    net.cbs.data(
      JSON.stringify(
        { level: { level: 40000, levelStr: 'ERROR' }

@ -253,7 +259,7 @@ vows.describe('Multiprocess Appender').addBatch({
        }
      }
    ).appender({ mode: 'master' });

    return fakeNet;
  },
  'should listen for log messages on localhost:5000': function(net) {

@ -296,7 +302,7 @@ vows.describe('Multiprocess Appender').addBatch({
    );

    return results;
  },
  'should load underlying appender for master': function(results) {
    assert.equal(results.appenderLoaded, 'madeupappender');
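For reference, the worker/master modes exercised above correspond to configuration shaped roughly like the following; the host, port, and wrapped file appender are placeholder values and the exact field names are an assumption based on the 0.6.x multiprocess appender, not taken from the commit:

// minimal sketch of a multiprocess appender setup (assumed 0.6.x config shape)
var log4js = require('log4js');

// master process: listens on loggerHost:loggerPort and hands events to a real appender
log4js.configure({
  appenders: [
    { type: "multiprocess", mode: "master",
      loggerHost: "localhost", loggerPort: 5000,
      appender: { type: "file", filename: "master.log" } }
  ]
});

// worker processes: serialize events (including Errors) over a socket to the master
log4js.configure({
  appenders: [
    { type: "multiprocess", mode: "worker",
      loggerHost: "localhost", loggerPort: 5000 }
  ]
});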
@ -23,7 +23,7 @@ function MockLogger() {
}

function MockRequest(remoteAddr, method, originalUrl) {

  this.socket = { remoteAddress: remoteAddr };
  this.originalUrl = originalUrl;
  this.method = method;

@ -70,7 +70,7 @@ vows.describe('log4js connect logger').addBatch({
        setTimeout(function() {
          cb(null, d.ml.messages);
        },10);
      },
      'check message': function(messages){
        assert.isArray(messages);
        assert.equal(messages.length, 1);

@ -93,7 +93,7 @@ vows.describe('log4js connect logger').addBatch({
        setTimeout(function() {
          cb(null, d.ml.messages);
        },10);
      },
      'check message': function(messages) {
        assert.isArray(messages);
        assert.equal(messages.length, 0);

@ -107,7 +107,7 @@ vows.describe('log4js connect logger').addBatch({
      var cl = clm.connectLogger(ml, {nolog: "\\.gif|\\.jpe?g"});
      return {cl: cl, ml: ml};
    },

    'check unmatch url request (png)': {
      topic: function(d){
        var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif

@ -115,10 +115,10 @@ vows.describe('log4js connect logger').addBatch({
        var cb = this.callback;
        d.cl(req, res, function() { });
        res.end('chunk', 'encoding');
        setTimeout(function() {
          cb(null, d.ml.messages);
        }, 10);
      },
      'check message': function(messages){
        assert.isArray(messages);
        assert.equal(messages.length, 1);

@ -138,10 +138,10 @@ vows.describe('log4js connect logger').addBatch({
        var cb = this.callback;
        d.cl(req, res, function() { });
        res.end('chunk', 'encoding');
        setTimeout(function() {
          cb(null, d.ml.messages);
        }, 10);
      },
      'check message': function(messages) {
        assert.isArray(messages);
        assert.equal(messages.length, 0);

@ -154,10 +154,10 @@ vows.describe('log4js connect logger').addBatch({
        var cb = this.callback;
        d.cl(req, res, function() { });
        res.end('chunk', 'encoding');
        setTimeout(function() {
          cb(null, d.ml.messages);
        }, 10);
      },
      'check message': function(messages) {
        assert.isArray(messages);
        assert.equal(messages.length, 0);

@ -170,7 +170,7 @@ vows.describe('log4js connect logger').addBatch({
      var cl = clm.connectLogger(ml, {nolog: ["\\.gif", "\\.jpe?g"]});
      return {cl: cl, ml: ml};
    },

    'check unmatch url request (png)': {
      topic: function(d){
        var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif

@ -178,10 +178,10 @@ vows.describe('log4js connect logger').addBatch({
        var cb = this.callback;
        d.cl(req, res, function() { });
        res.end('chunk', 'encoding');
        setTimeout(function() {
          cb(null, d.ml.messages);
        }, 10);
      },
      'check message': function(messages){
        assert.isArray(messages);
        assert.equal(messages.length, 1);

@ -201,10 +201,10 @@ vows.describe('log4js connect logger').addBatch({
        var cb = this.callback;
        d.cl(req, res, function() { });
        res.end('chunk', 'encoding');
        setTimeout(function() {
          cb(null, d.ml.messages);
        }, 10);
      },
      'check message': function(messages) {
        assert.isArray(messages);
        assert.equal(messages.length, 0);

@ -218,10 +218,10 @@ vows.describe('log4js connect logger').addBatch({
        var cb = this.callback;
        d.cl(req, res, function() { });
        res.end('chunk', 'encoding');
        setTimeout(function() {
          cb(null, d.ml.messages);
        }, 10);
      },
      'check message': function(messages) {
        assert.isArray(messages);
        assert.equal(messages.length, 0);

@ -242,10 +242,10 @@ vows.describe('log4js connect logger').addBatch({
        var cb = this.callback;
        d.cl(req, res, function() { });
        res.end('chunk', 'encoding');
        setTimeout(function() {
          cb(null, d.ml.messages);
        }, 10);
      },
      'check message': function(messages){
        assert.isArray(messages);
        assert.equal(messages.length, 1);

@ -265,10 +265,10 @@ vows.describe('log4js connect logger').addBatch({
        var cb = this.callback;
        d.cl(req, res, function() { });
        res.end('chunk', 'encoding');
        setTimeout(function() {
          cb(null, d.ml.messages);
        }, 10);
      },
      'check message': function(messages) {
        assert.isArray(messages);
        assert.equal(messages.length, 0);

@ -282,10 +282,10 @@ vows.describe('log4js connect logger').addBatch({
        var cb = this.callback;
        d.cl(req, res, function() { });
        res.end('chunk', 'encoding');
        setTimeout(function() {
          cb(null, d.ml.messages);
        }, 10);
      },
      'check message': function(messages) {
        assert.isArray(messages);
        assert.equal(messages.length, 0);
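The nolog variants above filter access-log entries by URL; wired into a Connect/Express app it looks roughly like this. Only the nolog patterns come from the tests; the express app, category name, and 'auto' level are assumptions for illustration:

// minimal sketch: skip access-log entries for .gif and .jpg/.jpeg requests
var log4js = require('log4js');
var express = require('express');

var app = express();
var httpLogger = log4js.getLogger('http');

// nolog accepts a regexp string or an array of strings, as in the tests above
app.use(log4js.connectLogger(httpLogger, { level: 'auto', nolog: "\\.gif|\\.jpe?g" }));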
@ -22,7 +22,7 @@ function setupLogging(category, options) {
        msgs.push(data);
        callback(false, {status: "sent"});
      }
    }
  };
}

return constructor(key);

@ -166,4 +166,3 @@ vows.describe('log4js slackAppender').addBatch({
  }
}
}).export(module);
@ -196,7 +196,10 @@ vows.describe('log4js smtpAppender').addBatch({
    'messages should contain proper data': function (result) {
      assert.equal(result.results[0].to, 'recipient@domain.com');
      assert.equal(result.results[0].subject, 'Log event #1');
      assert.equal(
        result.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length,
        2
      );
      assert.equal(result.results[1].to, 'recipient@domain.com');
      assert.equal(result.results[1].subject, 'Log event #3');
      assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].text));
@ -14,9 +14,9 @@ vows.describe('subcategories').addBatch({
          "sub1": "WARN",
          "sub1.sub11": "TRACE",
          "sub1.sub11.sub111": "WARN",
          "sub1.sub12": "INFO"
        }
      }, { reloadSecs: 30 });

      return {
        "sub1": log4js.getLogger('sub1'), // WARN

@ -39,7 +39,7 @@ vows.describe('subcategories').addBatch({
        assert.equal(loggers.sub13.level, levels.WARN);
        assert.equal(loggers.sub112.level, levels.TRACE);
        assert.equal(loggers.sub121.level, levels.INFO);
        assert.equal(loggers.sub0.level, levels.TRACE);
      }
    },
    'loggers created before levels configuration is loaded': {

@ -63,9 +63,9 @@ vows.describe('subcategories').addBatch({
          "sub1": "WARN",
          "sub1.sub11": "TRACE",
          "sub1.sub11.sub111": "WARN",
          "sub1.sub12": "INFO"
        }
      }, { reloadSecs: 30 });

      return loggers;

@ -80,7 +80,7 @@ vows.describe('subcategories').addBatch({
        assert.equal(loggers.sub13.level, levels.WARN);
        assert.equal(loggers.sub112.level, levels.TRACE);
        assert.equal(loggers.sub121.level, levels.INFO);
        assert.equal(loggers.sub0.level, levels.TRACE);
      }
    }
  }).exportTo(module);
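The subcategories hunks check that a dotted child category falls back to the nearest configured ancestor level. A small sketch of that behaviour; the category names and levels are reused from the test, the standalone configure() call is an assumption:

// minimal sketch: dotted categories inherit the closest configured ancestor level
var log4js = require('log4js');

log4js.configure({
  levels: {
    "sub1": "WARN",
    "sub1.sub11": "TRACE",
    "sub1.sub11.sub111": "WARN",
    "sub1.sub12": "INFO"
  }
});

console.log(log4js.getLogger('sub1.sub13').level.toString());        // WARN  (falls back to "sub1")
console.log(log4js.getLogger('sub1.sub11.sub112').level.toString()); // TRACE (falls back to "sub1.sub11")
console.log(log4js.getLogger('sub1.sub12.sub121').level.toString()); // INFO  (falls back to "sub1.sub12")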