Mirror of https://github.com/log4js-node/log4js-node.git, synced 2025-12-08 19:26:01 +00:00
Merge pull request #925 from log4js-node/coverage
Coverage improvements
Commit 5545f2a932
30 .eslintrc
@@ -1,21 +1,19 @@
{
"root": true,
"extends": "airbnb-base",
"rules": {
"comma-dangle": 0,
"indent": 2,
"object-shorthand": 0,
"func-names": 0,
"max-len": [1, 120, 2],
"no-use-before-define": ["warn"],
"no-param-reassign": 0,
"strict": 0,
"import/no-extraneous-dependencies": 1,
"prefer-spread": 0,
"prefer-rest-params": 0,
"prefer-destructuring": 0
},
"parserOptions": {
"ecmaVersion": 6
"ecmaVersion": 2018,
"sourceType": "module"
},
"extends": ["airbnb-base", "prettier"],
"plugins": ["prettier", "import"],
"rules": {
"comma-dangle": 0,
"indent": 2,
"func-names": 0,
"max-len": [1, 120, 2],
"no-use-before-define": ["warn"],
"no-param-reassign": 0,
"strict": 1,
"import/no-extraneous-dependencies": 1
}
}

@@ -1,5 +1,3 @@
'use strict';

function maxFileSizeUnitTransform(maxLogSize) {
if (typeof maxLogSize === 'number' && Number.isInteger(maxLogSize)) {
return maxLogSize;

@@ -1,5 +1,3 @@
'use strict';

const debug = require('debug')('log4js:categoryFilter');

function categoryFilter(excludes, appender) {

@@ -1,5 +1,3 @@
'use strict';

// eslint-disable-next-line no-console
const consoleLog = console.log.bind(console);

@@ -1,5 +1,3 @@
'use strict';

const streams = require('streamroller');
const os = require('os');

@@ -1,5 +1,3 @@
'use strict';

const debug = require('debug')('log4js:file');
const path = require('path');
const streams = require('streamroller');

@@ -1,5 +1,3 @@
'use strict';

const debug = require('debug')('log4js:fileSync');
const path = require('path');
const fs = require('fs');

@@ -1,5 +1,3 @@
'use strict';

function logLevelFilter(minLevelString, maxLevelString, appender, levels) {
const minLevel = levels.getLevel(minLevelString);
const maxLevel = levels.getLevel(maxLevelString, levels.FATAL);

@@ -1,4 +1,4 @@
'use strict';


const debug = require('debug')('log4js:multiFile');
const path = require('path');

@@ -1,4 +1,4 @@
'use strict';


const debug = require('debug')('log4js:multiprocess');
const net = require('net');

@@ -1,4 +1,4 @@
'use strict';


const debug = require('debug')('log4js:noLogFilter');

@@ -1,4 +1,4 @@
'use strict';


const debug = require('debug')('log4js:recording');

@@ -21,9 +21,9 @@ function reset() {
}

module.exports = {
configure: configure,
replay: replay,
configure,
replay,
playback: replay,
reset: reset,
reset,
erase: reset
};

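Aside (illustrative only, not part of this commit): the recording appender whose exports are tidied above is mostly used by the tests later in this diff. A minimal sketch of how it is usually wired up, assuming the lib/appenders/recording path used by those tests:

// Illustrative sketch only — not part of the diff.
const log4js = require("log4js");
const recording = require("log4js/lib/appenders/recording"); // path as used in the tests below

log4js.configure({
  appenders: { vcr: { type: "recording" } },
  categories: { default: { appenders: ["vcr"], level: "debug" } }
});

log4js.getLogger().info("captured in memory");
const events = recording.replay(); // array of LoggingEvent objects
recording.erase();                 // alias of reset(), per the exports above
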
@@ -1,4 +1,4 @@
'use strict';


function stderrAppender(layout, timezoneOffset) {
return (loggingEvent) => {

@@ -1,4 +1,4 @@
'use strict';


function stdoutAppender(layout, timezoneOffset) {
return (loggingEvent) => {

@@ -1,4 +1,4 @@
'use strict';


const debug = require('debug')('log4js:tcp');
const net = require('net');

@@ -36,9 +36,7 @@ configuration.addListener((config) => {
// clear out the listeners, because configure has been called.
listeners.length = 0;

disabled = config.disableClustering;
pm2 = config.pm2;
pm2InstanceVar = config.pm2InstanceVar || 'NODE_APP_INSTANCE';
({ pm2, disableClustering:disabled, pm2InstanceVar='NODE_APP_INSTANCE' } = config);

debug(`clustering disabled ? ${disabled}`);
debug(`cluster.isMaster ? ${cluster.isMaster}`);
@@ -72,7 +70,7 @@ configuration.addListener((config) => {

module.exports = {
onlyOnMaster: (fn, notMaster) => (isMaster() ? fn() : notMaster),
isMaster: isMaster,
isMaster,
send: (msg) => {
if (isMaster()) {
sendToListeners(msg);

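The replacement line above collapses three assignments into one destructuring statement. A small stand-alone sketch of the idiom (illustrative only, using a hypothetical config object):

// Illustrative sketch only — not part of the diff.
let pm2;
let disabled;
let pm2InstanceVar;

const config = { pm2: true, disableClustering: false }; // hypothetical input

// Renames disableClustering to the local `disabled` and defaults
// pm2InstanceVar when the property is absent. The parentheses are required
// because an assignment statement cannot start with `{`.
({ pm2, disableClustering: disabled, pm2InstanceVar = "NODE_APP_INSTANCE" } = config);

console.log(pm2, disabled, pm2InstanceVar); // true false NODE_APP_INSTANCE
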
@@ -1,4 +1,4 @@
'use strict';


const util = require('util');
const debug = require('debug')('log4js:configuration');

@@ -1,13 +1,12 @@
/* eslint-disable no-plusplus */

'use strict';
const levels = require("./levels");

const levels = require('./levels');

const DEFAULT_FORMAT = ':remote-addr - -'
+ ' ":method :url HTTP/:http-version"'
+ ' :status :content-length ":referrer"'
+ ' ":user-agent"';
const DEFAULT_FORMAT =
":remote-addr - -" +
' ":method :url HTTP/:http-version"' +
' :status :content-length ":referrer"' +
' ":user-agent"';

/**
* Return request url path,
@@ -22,19 +21,18 @@ function getUrl(req) {
return req.originalUrl || req.url;
}


/**
* Adds custom {token, replacement} objects to defaults,
* overwriting the defaults if any tokens clash
*
* @param {IncomingMessage} req
* @param {ServerResponse} res
* @param {Array} customTokens
* [{ token: string-or-regexp, replacement: string-or-replace-function }]
* @return {Array}
*/
* Adds custom {token, replacement} objects to defaults,
* overwriting the defaults if any tokens clash
*
* @param {IncomingMessage} req
* @param {ServerResponse} res
* @param {Array} customTokens
* [{ token: string-or-regexp, replacement: string-or-replace-function }]
* @return {Array}
*/
function assembleTokens(req, res, customTokens) {
const arrayUniqueTokens = (array) => {
const arrayUniqueTokens = array => {
const a = array.concat();
for (let i = 0; i < a.length; ++i) {
for (let j = i + 1; j < a.length; ++j) {
@@ -49,49 +47,61 @@ function assembleTokens(req, res, customTokens) {
};

const defaultTokens = [];
defaultTokens.push({ token: ':url', replacement: getUrl(req) });
defaultTokens.push({ token: ':protocol', replacement: req.protocol });
defaultTokens.push({ token: ':hostname', replacement: req.hostname });
defaultTokens.push({ token: ':method', replacement: req.method });
defaultTokens.push({ token: ':status', replacement: res.__statusCode || res.statusCode });
defaultTokens.push({ token: ':response-time', replacement: res.responseTime });
defaultTokens.push({ token: ':date', replacement: new Date().toUTCString() });
defaultTokens.push({ token: ":url", replacement: getUrl(req) });
defaultTokens.push({ token: ":protocol", replacement: req.protocol });
defaultTokens.push({ token: ":hostname", replacement: req.hostname });
defaultTokens.push({ token: ":method", replacement: req.method });
defaultTokens.push({
token: ':referrer',
replacement: req.headers.referer || req.headers.referrer || ''
token: ":status",
replacement: res.__statusCode || res.statusCode
});
defaultTokens.push({
token: ':http-version',
token: ":response-time",
replacement: res.responseTime
});
defaultTokens.push({ token: ":date", replacement: new Date().toUTCString() });
defaultTokens.push({
token: ":referrer",
replacement: req.headers.referer || req.headers.referrer || ""
});
defaultTokens.push({
token: ":http-version",
replacement: `${req.httpVersionMajor}.${req.httpVersionMinor}`
});
defaultTokens.push({
token: ':remote-addr',
replacement: req.headers['x-forwarded-for']
|| req.ip
|| req._remoteAddress
|| (req.socket
&& (req.socket.remoteAddress
|| (req.socket.socket && req.socket.socket.remoteAddress)
)
)
token: ":remote-addr",
replacement:
req.headers["x-forwarded-for"] ||
req.ip ||
req._remoteAddress ||
(req.socket &&
(req.socket.remoteAddress ||
(req.socket.socket && req.socket.socket.remoteAddress)))
});
defaultTokens.push({ token: ':user-agent', replacement: req.headers['user-agent'] });
defaultTokens.push({
token: ':content-length',
replacement: res.getHeader('content-length')
|| (res.__headers && res.__headers['Content-Length'])
|| '-'
token: ":user-agent",
replacement: req.headers["user-agent"]
});
defaultTokens.push({
token: ":content-length",
replacement:
res.getHeader("content-length") ||
(res.__headers && res.__headers["Content-Length"]) ||
"-"
});
defaultTokens.push({
token: /:req\[([^\]]+)]/g,
replacement: function (_, field) {
replacement(_, field) {
return req.headers[field.toLowerCase()];
}
});
defaultTokens.push({
token: /:res\[([^\]]+)]/g,
replacement: function (_, field) {
return res.getHeader(field.toLowerCase()) || (res.__headers && res.__headers[field]);
replacement(_, field) {
return (
res.getHeader(field.toLowerCase()) ||
(res.__headers && res.__headers[field])
);
}
});

@@ -99,13 +109,13 @@ function assembleTokens(req, res, customTokens) {
}

/**
* Return formatted log line.
*
* @param {String} str
* @param {Array} tokens
* @return {String}
* @api private
*/
* Return formatted log line.
*
* @param {String} str
* @param {Array} tokens
* @return {String}
* @api private
*/
function format(str, tokens) {
for (let i = 0; i < tokens.length; i++) {
str = str.replace(tokens[i].token, tokens[i].replacement);
@@ -114,32 +124,32 @@ function format(str, tokens) {
}

/**
* Return RegExp Object about nolog
*
* @param {String|Array} nolog
* @return {RegExp}
* @api private
*
* syntax
* 1. String
* 1.1 "\\.gif"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga
* LOGGING http://example.com/hoge.agif
* 1.2 in "\\.gif|\\.jpg$"
* NOT LOGGING http://example.com/hoge.gif and
* http://example.com/hoge.gif?fuga and http://example.com/hoge.jpg?fuga
* LOGGING http://example.com/hoge.agif,
* http://example.com/hoge.ajpg and http://example.com/hoge.jpg?hoge
* 1.3 in "\\.(gif|jpe?g|png)$"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.jpeg
* LOGGING http://example.com/hoge.gif?uid=2 and http://example.com/hoge.jpg?pid=3
* 2. RegExp
* 2.1 in /\.(gif|jpe?g|png)$/
* SAME AS 1.3
* 3. Array
* 3.1 ["\\.jpg$", "\\.png", "\\.gif"]
* SAME AS "\\.jpg|\\.png|\\.gif"
*/
* Return RegExp Object about nolog
*
* @param {String|Array} nolog
* @return {RegExp}
* @api private
*
* syntax
* 1. String
* 1.1 "\\.gif"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga
* LOGGING http://example.com/hoge.agif
* 1.2 in "\\.gif|\\.jpg$"
* NOT LOGGING http://example.com/hoge.gif and
* http://example.com/hoge.gif?fuga and http://example.com/hoge.jpg?fuga
* LOGGING http://example.com/hoge.agif,
* http://example.com/hoge.ajpg and http://example.com/hoge.jpg?hoge
* 1.3 in "\\.(gif|jpe?g|png)$"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.jpeg
* LOGGING http://example.com/hoge.gif?uid=2 and http://example.com/hoge.jpg?pid=3
* 2. RegExp
* 2.1 in /\.(gif|jpe?g|png)$/
* SAME AS 1.3
* 3. Array
* 3.1 ["\\.jpg$", "\\.png", "\\.gif"]
* SAME AS "\\.jpg|\\.png|\\.gif"
*/
function createNoLogCondition(nolog) {
let regexp = null;

@@ -147,40 +157,40 @@ function createNoLogCondition(nolog) {
regexp = nolog;
}

if (typeof nolog === 'string') {
if (typeof nolog === "string") {
regexp = new RegExp(nolog);
}

if (Array.isArray(nolog)) {
// convert to strings
const regexpsAsStrings = nolog.map(reg => (reg.source ? reg.source : reg));
regexp = new RegExp(regexpsAsStrings.join('|'));
regexp = new RegExp(regexpsAsStrings.join("|"));
}

return regexp;
}

/**
* Allows users to define rules around status codes to assign them to a specific
* logging level.
* There are two types of rules:
* - RANGE: matches a code within a certain range
* E.g. { 'from': 200, 'to': 299, 'level': 'info' }
* - CONTAINS: matches a code to a set of expected codes
* E.g. { 'codes': [200, 203], 'level': 'debug' }
* Note*: Rules are respected only in order of prescendence.
*
* @param {Number} statusCode
* @param {Level} currentLevel
* @param {Object} ruleSet
* @return {Level}
* @api private
*/
* Allows users to define rules around status codes to assign them to a specific
* logging level.
* There are two types of rules:
* - RANGE: matches a code within a certain range
* E.g. { 'from': 200, 'to': 299, 'level': 'info' }
* - CONTAINS: matches a code to a set of expected codes
* E.g. { 'codes': [200, 203], 'level': 'debug' }
* Note*: Rules are respected only in order of prescendence.
*
* @param {Number} statusCode
* @param {Level} currentLevel
* @param {Object} ruleSet
* @return {Level}
* @api private
*/
function matchRules(statusCode, currentLevel, ruleSet) {
let level = currentLevel;

if (ruleSet) {
const matchedRule = ruleSet.find((rule) => {
const matchedRule = ruleSet.find(rule => {
let ruleMatched = false;
if (rule.from && rule.to) {
ruleMatched = statusCode >= rule.from && statusCode <= rule.to;
@@ -197,38 +207,38 @@ function matchRules(statusCode, currentLevel, ruleSet) {
}

/**
* Log requests with the given `options` or a `format` string.
*
* Options:
*
* - `format` Format string, see below for tokens
* - `level` A log4js levels instance. Supports also 'auto'
* - `nolog` A string or RegExp to exclude target logs
* - `statusRules` A array of rules for setting specific logging levels base on status codes
* - `context` Whether to add a response of express to the context
*
* Tokens:
*
* - `:req[header]` ex: `:req[Accept]`
* - `:res[header]` ex: `:res[Content-Length]`
* - `:http-version`
* - `:response-time`
* - `:remote-addr`
* - `:date`
* - `:method`
* - `:url`
* - `:referrer`
* - `:user-agent`
* - `:status`
*
* @return {Function}
* @param logger4js
* @param options
* @api public
*/
* Log requests with the given `options` or a `format` string.
*
* Options:
*
* - `format` Format string, see below for tokens
* - `level` A log4js levels instance. Supports also 'auto'
* - `nolog` A string or RegExp to exclude target logs
* - `statusRules` A array of rules for setting specific logging levels base on status codes
* - `context` Whether to add a response of express to the context
*
* Tokens:
*
* - `:req[header]` ex: `:req[Accept]`
* - `:res[header]` ex: `:res[Content-Length]`
* - `:http-version`
* - `:response-time`
* - `:remote-addr`
* - `:date`
* - `:method`
* - `:url`
* - `:referrer`
* - `:user-agent`
* - `:status`
*
* @return {Function}
* @param logger4js
* @param options
* @api public
*/
module.exports = function getLogger(logger4js, options) {
/* eslint no-underscore-dangle:0 */
if (typeof options === 'string' || typeof options === 'function') {
if (typeof options === "string" || typeof options === "function") {
options = { format: options };
} else {
options = options || {};
@@ -246,9 +256,9 @@ module.exports = function getLogger(logger4js, options) {
// nologs
if (nolog && nolog.test(req.originalUrl)) return next();

if (thisLogger.isLevelEnabled(level) || options.level === 'auto') {
if (thisLogger.isLevelEnabled(level) || options.level === "auto") {
const start = new Date();
const writeHead = res.writeHead;
const { writeHead } = res;

// flag as logging
req._logging = true;
@@ -263,10 +273,10 @@ module.exports = function getLogger(logger4js, options) {
};

// hook on end request to emit the log entry of the HTTP request.
res.on('finish', () => {
res.on("finish", () => {
res.responseTime = new Date() - start;
// status code response level handling
if (res.statusCode && options.level === 'auto') {
if (res.statusCode && options.level === "auto") {
level = levels.INFO;
if (res.statusCode >= 300) level = levels.WARN;
if (res.statusCode >= 400) level = levels.ERROR;
@@ -275,14 +285,14 @@ module.exports = function getLogger(logger4js, options) {

const combinedTokens = assembleTokens(req, res, options.tokens || []);

if (options.context) thisLogger.addContext('res', res);
if (typeof fmt === 'function') {
if (options.context) thisLogger.addContext("res", res);
if (typeof fmt === "function") {
const line = fmt(req, res, str => format(str, combinedTokens));
if (line) thisLogger.log(level, line);
} else {
thisLogger.log(level, format(fmt, combinedTokens));
}
if (options.context) thisLogger.removeContext('res');
if (options.context) thisLogger.removeContext("res");
});
}

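For context (illustrative only, not part of the diff): the middleware exported here is normally attached to an Express app through log4js.connectLogger. A minimal sketch using the options and tokens documented in the comments above:

// Illustrative sketch only — not part of the diff.
const express = require("express");
const log4js = require("log4js");

log4js.configure({
  appenders: { out: { type: "stdout" } },
  categories: { default: { appenders: ["out"], level: "info" } }
});

const app = express();
// `level: "auto"` picks INFO/WARN/ERROR from the response status code,
// as implemented in the res.on("finish") handler above.
app.use(
  log4js.connectLogger(log4js.getLogger("http"), {
    level: "auto",
    format: ":method :url :status :response-time ms"
  })
);

app.get("/", (req, res) => res.send("ok"));
app.listen(3000);
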
@@ -1,12 +1,10 @@
'use strict';


const dateFormat = require('date-format');
const os = require('os');
const util = require('util');
const path = require('path');

const eol = os.EOL || '\n';

const styles = {
// styles
bold: [1, 22],
@@ -71,10 +69,7 @@ function basicLayout(loggingEvent) {
* same as basicLayout, but with colours.
*/
function colouredLayout(loggingEvent) {
return timestampLevelAndCategory(
loggingEvent,
loggingEvent.level.colour
) + util.format(...loggingEvent.data);
return timestampLevelAndCategory(loggingEvent, loggingEvent.level.colour) + util.format(...loggingEvent.data);
}

function messagePassThroughLayout(loggingEvent) {
@@ -168,7 +163,7 @@ function patternLayout(pattern, tokens) {
}

function endOfLine() {
return eol;
return os.EOL;
}

function logLevel(loggingEvent) {
@@ -247,24 +242,24 @@ function patternLayout(pattern, tokens) {

/* eslint quote-props:0 */
const replacers = {
'c': categoryName,
'd': formatAsDate,
'h': hostname,
'm': formatMessage,
'n': endOfLine,
'p': logLevel,
'r': startTime,
c: categoryName,
d: formatAsDate,
h: hostname,
m: formatMessage,
n: endOfLine,
p: logLevel,
r: startTime,
'[': startColour,
']': endColour,
'y': clusterInfo,
'z': pid,
y: clusterInfo,
z: pid,
'%': percent,
'x': userDefined,
'X': contextDefined,
'f': fileName,
'l': lineNumber,
'o': columnNumber,
's': callStack,
x: userDefined,
X: contextDefined,
f: fileName,
l: lineNumber,
o: columnNumber,
s: callStack
};

function replaceToken(conversionCharacter, loggingEvent, specifier) {
@@ -338,22 +333,22 @@ function patternLayout(pattern, tokens) {
}

const layoutMakers = {
messagePassThrough: function () {
messagePassThrough () {
return messagePassThroughLayout;
},
basic: function () {
basic () {
return basicLayout;
},
colored: function () {
colored () {
return colouredLayout;
},
coloured: function () {
coloured () {
return colouredLayout;
},
pattern: function (config) {
pattern (config) {
return patternLayout(config && config.pattern, config && config.tokens);
},
dummy: function () {
dummy () {
return dummyLayout;
}
};
@@ -365,10 +360,10 @@ module.exports = {
colouredLayout,
coloredLayout: colouredLayout,
dummyLayout,
addLayout: function (name, serializerGenerator) {
addLayout (name, serializerGenerator) {
layoutMakers[name] = serializerGenerator;
},
layout: function (name, config) {
layout (name, config) {
return layoutMakers[name] && layoutMakers[name](config);
}
};

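For reference (illustrative only, not part of the diff): the replacer table above drives the pattern layout. A minimal sketch of selecting it from an appender config, using conversion characters that appear in that table:

// Illustrative sketch only — not part of the diff.
const log4js = require("log4js");

log4js.configure({
  appenders: {
    out: {
      type: "stdout",
      // %d date, %p level, %c category, %m message, %n end-of-line —
      // each handled by an entry in the `replacers` map shown above.
      layout: { type: "pattern", pattern: "%d %p %c %m%n" }
    }
  },
  categories: { default: { appenders: ["out"], level: "debug" } }
});

log4js.getLogger("demo").info("hello");
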
@@ -1,4 +1,4 @@
'use strict';


const configuration = require('./configuration');

@@ -39,11 +39,7 @@ class Level {
sArg = sArg.levelStr;
}

if (typeof sArg === 'string') {
return Level[sArg.toUpperCase()] || defaultLevel;
}

return Level.getLevel(sArg.toString());
return Level[sArg.toString().toUpperCase()] || defaultLevel;
}

static addLevels(customLevels) {

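The change above makes getLevel upper-case whatever sArg.toString() returns instead of recursing. A small sketch of the observable behaviour, assuming the module is required from lib/levels as the rest of this diff does:

// Illustrative sketch only — not part of the diff.
const levels = require("log4js/lib/levels");

// Strings are matched case-insensitively against the named levels.
console.log(levels.getLevel("debug").levelStr); // "DEBUG"

// Per the code above, non-string values are coerced with toString()
// before the lookup, and unknown values fall back to the supplied default.
console.log(levels.getLevel({ toString: () => "warn" }).levelStr); // "WARN"
console.log(levels.getLevel("nonsense", levels.INFO).levelStr);    // "INFO"
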
@@ -1,5 +1,3 @@
'use strict';

/**
* @fileoverview log4js is a library to log in JavaScript in similar manner
* than in log4j for Java (but not really).
@@ -20,41 +18,47 @@
* @since 2005-05-20
* Website: http://log4js.berlios.de
*/
const debug = require('debug')('log4js:main');
const fs = require('fs');
const deepClone = require('rfdc')({ proto: true });
const configuration = require('./configuration');
const layouts = require('./layouts');
const levels = require('./levels');
const appenders = require('./appenders');
const categories = require('./categories');
const Logger = require('./logger');
const clustering = require('./clustering');
const connectLogger = require('./connect-logger');
const debug = require("debug")("log4js:main");
const fs = require("fs");
const deepClone = require("rfdc")({ proto: true });
const configuration = require("./configuration");
const layouts = require("./layouts");
const levels = require("./levels");
const appenders = require("./appenders");
const categories = require("./categories");
const Logger = require("./logger");
const clustering = require("./clustering");
const connectLogger = require("./connect-logger");

let enabled = false;

function sendLogEventToAppender(logEvent) {
if (!enabled) return;
debug('Received log event ', logEvent);
const categoryAppenders = categories.appendersForCategory(logEvent.categoryName);
categoryAppenders.forEach((appender) => {
debug("Received log event ", logEvent);
const categoryAppenders = categories.appendersForCategory(
logEvent.categoryName
);
categoryAppenders.forEach(appender => {
appender(logEvent);
});
}

function loadConfigurationFile(filename) {
if (filename) {
debug(`Loading configuration from ${filename}`);
return JSON.parse(fs.readFileSync(filename, 'utf8'));
debug(`Loading configuration from ${filename}`);
try {
return JSON.parse(fs.readFileSync(filename, "utf8"));
} catch (e) {
throw new Error(
`Problem reading config from file "${filename}". Error was ${e.message}`,
e
);
}
return filename;
}

function configure(configurationFileOrObject) {
let configObject = configurationFileOrObject;

if (typeof configObject === 'string') {
if (typeof configObject === "string") {
configObject = loadConfigurationFile(configurationFileOrObject);
}
debug(`Configuration is ${configObject}`);
@@ -78,14 +82,17 @@ function configure(configurationFileOrObject) {
* as the first argument.
*/
function shutdown(cb) {
debug('Shutdown called. Disabling all log writing.');
debug("Shutdown called. Disabling all log writing.");
// First, disable all writing to appenders. This prevents appenders from
// not being able to be drained because of run-away log writes.
enabled = false;

// Call each of the shutdown functions in parallel
const appendersToCheck = Array.from(appenders.values());
const shutdownFunctions = appendersToCheck.reduceRight((accum, next) => (next.shutdown ? accum + 1 : accum), 0);
const shutdownFunctions = appendersToCheck.reduceRight(
(accum, next) => (next.shutdown ? accum + 1 : accum),
0
);
let completed = 0;
let error;

@@ -95,13 +102,13 @@ function shutdown(cb) {
completed += 1;
debug(`Appender shutdowns complete: ${completed} / ${shutdownFunctions}`);
if (completed >= shutdownFunctions) {
debug('All shutdown functions completed.');
debug("All shutdown functions completed.");
cb(error);
}
}

if (shutdownFunctions === 0) {
debug('No appenders with shutdown functions found.');
debug("No appenders with shutdown functions found.");
return cb();
}

@@ -118,15 +125,16 @@ function shutdown(cb) {
*/
function getLogger(category) {
if (!enabled) {
configure(process.env.LOG4JS_CONFIG || {
appenders: { out: { type: 'stdout' } },
categories: { default: { appenders: ['out'], level: 'OFF' } }
});
configure(
process.env.LOG4JS_CONFIG || {
appenders: { out: { type: "stdout" } },
categories: { default: { appenders: ["out"], level: "OFF" } }
}
);
}
return new Logger(category || 'default');
return new Logger(category || "default");
}


/**
* @name log4js
* @namespace Log4js
@@ -140,7 +148,7 @@ const log4js = {
shutdown,
connectLogger,
levels,
addLayout: layouts.addLayout,
addLayout: layouts.addLayout
};

module.exports = log4js;

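Worth noting from the getLogger hunk above (illustrative only, not part of the diff): if getLogger is called before configure, and LOG4JS_CONFIG is not set, the library configures itself with a stdout appender whose default category level is OFF, so nothing is emitted until configure is called explicitly:

// Illustrative sketch only — not part of the diff.
const log4js = require("log4js");

const logger = log4js.getLogger("startup");
logger.info("dropped"); // default category level is OFF, so no output

log4js.configure({
  appenders: { out: { type: "stdout" } },
  categories: { default: { appenders: ["out"], level: "info" } }
});
logger.info("now this is written to stdout"); // level is looked up per category at call time
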
@@ -1,17 +1,14 @@
/* eslint no-underscore-dangle:0 */

'use strict';

const debug = require('debug')('log4js:logger');
const LoggingEvent = require('./LoggingEvent');
const levels = require('./levels');
const clustering = require('./clustering');
const categories = require('./categories');
const configuration = require('./configuration');
const debug = require("debug")("log4js:logger");
const LoggingEvent = require("./LoggingEvent");
const levels = require("./levels");
const clustering = require("./clustering");
const categories = require("./categories");
const configuration = require("./configuration");

const stackReg = /at (?:(.+)\s+\()?(?:(.+?):(\d+)(?::(\d+))?|([^)]+))\)?/;
function defaultParseCallStack(data, skipIdx = 4) {
const stacklines = data.stack.split('\n').slice(skipIdx);
const stacklines = data.stack.split("\n").slice(skipIdx);
const lineMatch = stackReg.exec(stacklines[0]);
if (lineMatch && lineMatch.length === 6) {
return {
@@ -19,7 +16,7 @@ function defaultParseCallStack(data, skipIdx = 4) {
fileName: lineMatch[2],
lineNumber: parseInt(lineMatch[3], 10),
columnNumber: parseInt(lineMatch[4], 10),
callStack: stacklines.join('\n'),
callStack: stacklines.join("\n")
};
}
return null;
@@ -40,7 +37,7 @@ function defaultParseCallStack(data, skipIdx = 4) {
class Logger {
constructor(name) {
if (!name) {
throw new Error('No category provided.');
throw new Error("No category provided.");
}
this.category = name;
this.context = {};
@@ -49,11 +46,17 @@ class Logger {
}

get level() {
return levels.getLevel(categories.getLevelForCategory(this.category), levels.TRACE);
return levels.getLevel(
categories.getLevelForCategory(this.category),
levels.TRACE
);
}

set level(level) {
categories.setLevelForCategory(this.category, levels.getLevel(level, this.level));
categories.setLevelForCategory(
this.category,
levels.getLevel(level, this.level)
);
}

get useCallStack() {
@@ -61,7 +64,7 @@ class Logger {
}

set useCallStack(bool) {
categories.setEnableCallStackForCategory(this.category, (bool === true));
categories.setEnableCallStackForCategory(this.category, bool === true);
}

log(level, ...args) {
@@ -82,7 +85,7 @@ class Logger {
level,
data,
this.context,
(this.useCallStack) && this.parseCallStack(new Error())
this.useCallStack && this.parseCallStack(new Error())
);
clustering.send(loggingEvent);
}
@@ -108,14 +111,16 @@ function addLevelMethods(target) {
const level = levels.getLevel(target);

const levelStrLower = level.toString().toLowerCase();
const levelMethod = levelStrLower.replace(/_([a-z])/g, g => g[1].toUpperCase());
const levelMethod = levelStrLower.replace(/_([a-z])/g, g =>
g[1].toUpperCase()
);
const isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1);

Logger.prototype[`is${isLevelMethod}Enabled`] = function () {
Logger.prototype[`is${isLevelMethod}Enabled`] = function() {
return this.isLevelEnabled(level);
};

Logger.prototype[levelMethod] = function (...args) {
Logger.prototype[levelMethod] = function(...args) {
this.log(level, ...args);
};
}

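For context (illustrative only, not part of the diff): addLevelMethods generates one logging method and one guard method per level, which is what makes the usual logger API work. A small sketch of the generated surface:

// Illustrative sketch only — not part of the diff.
const log4js = require("log4js");

log4js.configure({
  appenders: { out: { type: "stdout" } },
  categories: { default: { appenders: ["out"], level: "debug" } }
});

const logger = log4js.getLogger("demo");

// Generated by addLevelMethods above: logger.trace/debug/info/warn/error/fatal
// plus the corresponding is<Level>Enabled guards.
logger.debug("fine-grained detail");
logger.error("something broke", new Error("boom"));
console.log(logger.isInfoEnabled()); // true, since the category level is debug
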
635 package-lock.json (generated)
File diff suppressed because it is too large
@@ -51,12 +51,15 @@
"callsites": "^3.1.0",
"codecov": "^3.5.0",
"deep-freeze": "0.0.1",
"eslint": "^6.1.0",
"eslint": "^5.3.0",
"eslint-config-airbnb-base": "^13.2.0",
"eslint-config-prettier": "^6.0.0",
"eslint-import-resolver-node": "^0.3.2",
"eslint-plugin-import": "^2.18.2",
"eslint-plugin-prettier": "^3.1.0",
"husky": "^3.0.2",
"nyc": "^14.1.1",
"prettier": "^1.18.2",
"tap": "^14.5.0",
"typescript": "^3.5.3",
"validate-commit-msg": "^2.14.0"

@@ -1,14 +1,12 @@
'use strict';

const sandbox = require('@log4js-node/sandboxed-module');
const sandbox = require("@log4js-node/sandboxed-module");

sandbox.configure({
sourceTransformers: {
nyc: function (source) {
if (this.filename.indexOf('node_modules') > -1) {
nyc(source) {
if (this.filename.indexOf("node_modules") > -1) {
return source;
}
const nyc = new (require('nyc'))();
const nyc = new (require("nyc"))();
return nyc.instrumenter().instrumentSync(source, this.filename);
}
}

@@ -1,49 +1,51 @@
const flatted = require('flatted');
const test = require('tap').test;
const LoggingEvent = require('../../lib/LoggingEvent');
const levels = require('../../lib/levels');
const flatted = require("flatted");
const { test } = require("tap");
const LoggingEvent = require("../../lib/LoggingEvent");
const levels = require("../../lib/levels");

test('LoggingEvent', (batch) => {
batch.test('should serialise to flatted', (t) => {
const event = new LoggingEvent('cheese', levels.DEBUG, ['log message'], { user: 'bob' });
test("LoggingEvent", batch => {
batch.test("should serialise to flatted", t => {
const event = new LoggingEvent("cheese", levels.DEBUG, ["log message"], {
user: "bob"
});
// set the event date to a known value
event.startTime = new Date(Date.UTC(2018, 1, 4, 18, 30, 23, 10));
const rehydratedEvent = flatted.parse(event.serialise());
t.equal(rehydratedEvent.startTime, '2018-02-04T18:30:23.010Z');
t.equal(rehydratedEvent.categoryName, 'cheese');
t.equal(rehydratedEvent.level.levelStr, 'DEBUG');
t.equal(rehydratedEvent.startTime, "2018-02-04T18:30:23.010Z");
t.equal(rehydratedEvent.categoryName, "cheese");
t.equal(rehydratedEvent.level.levelStr, "DEBUG");
t.equal(rehydratedEvent.data.length, 1);
t.equal(rehydratedEvent.data[0], 'log message');
t.equal(rehydratedEvent.context.user, 'bob');
t.equal(rehydratedEvent.data[0], "log message");
t.equal(rehydratedEvent.context.user, "bob");
t.end();
});

batch.test('should deserialise from flatted', (t) => {
batch.test("should deserialise from flatted", t => {
const dehydratedEvent = flatted.stringify({
startTime: '2018-02-04T10:25:23.010Z',
categoryName: 'biscuits',
startTime: "2018-02-04T10:25:23.010Z",
categoryName: "biscuits",
level: {
levelStr: 'INFO'
levelStr: "INFO"
},
data: ['some log message', { x: 1 }],
context: { thing: 'otherThing' }
data: ["some log message", { x: 1 }],
context: { thing: "otherThing" }
});
const event = LoggingEvent.deserialise(dehydratedEvent);
t.type(event, LoggingEvent);
t.same(event.startTime, new Date(Date.UTC(2018, 1, 4, 10, 25, 23, 10)));
t.equal(event.categoryName, 'biscuits');
t.equal(event.categoryName, "biscuits");
t.same(event.level, levels.INFO);
t.equal(event.data[0], 'some log message');
t.equal(event.data[0], "some log message");
t.equal(event.data[1].x, 1);
t.equal(event.context.thing, 'otherThing');
t.equal(event.context.thing, "otherThing");
t.end();
});

batch.test('Should correct construct with/without location info', (t) => {
batch.test("Should correct construct with/without location info", t => {
// console.log([Error('123').stack.split('\n').slice(1).join('\n')])
/* eslint-disable-next-line */
const callStack = ' at repl:1:14\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)';
const fileName = '/log4js-node/test/tap/layouts-test.js';
const callStack =
" at repl:1:14\n at ContextifyScript.Script.runInThisContext (vm.js:50:33)\n at REPLServer.defaultEval (repl.js:240:29)\n at bound (domain.js:301:14)\n at REPLServer.runBound [as eval] (domain.js:314:12)\n at REPLServer.onLine (repl.js:468:10)\n at emitOne (events.js:121:20)\n at REPLServer.emit (events.js:211:7)\n at REPLServer.Interface._onLine (readline.js:280:10)\n at REPLServer.Interface._line (readline.js:629:8)"; // eslint-disable-line
const fileName = "/log4js-node/test/tap/layouts-test.js";
const lineNumber = 1;
const columnNumber = 14;
const location = {
@@ -52,13 +54,21 @@ test('LoggingEvent', (batch) => {
columnNumber,
callStack
};
const event = new LoggingEvent('cheese', levels.DEBUG, ['log message'], { user: 'bob' }, location);
const event = new LoggingEvent(
"cheese",
levels.DEBUG,
["log message"],
{ user: "bob" },
location
);
t.equal(event.fileName, fileName);
t.equal(event.lineNumber, lineNumber);
t.equal(event.columnNumber, columnNumber);
t.equal(event.callStack, callStack);

const event2 = new LoggingEvent('cheese', levels.DEBUG, ['log message'], { user: 'bob' });
const event2 = new LoggingEvent("cheese", levels.DEBUG, ["log message"], {
user: "bob"
});
t.equal(event2.fileName, undefined);
t.equal(event2.lineNumber, undefined);
t.equal(event2.columnNumber, undefined);

@@ -1,60 +1,71 @@
'use strict';
const { test } = require("tap");
const log4js = require("../../lib/log4js");
const recording = require("../../lib/appenders/recording");

const test = require('tap').test;
const log4js = require('../../lib/log4js');
const recording = require('../../lib/appenders/recording');
test("log4js categoryFilter", batch => {
batch.beforeEach(done => {
recording.reset();
done();
});

test('log4js categoryFilter', (batch) => {
batch.beforeEach((done) => { recording.reset(); done(); });

batch.test('appender should exclude categories', (t) => {
batch.test("appender should exclude categories", t => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
recorder: { type: "recording" },
filtered: {
type: 'categoryFilter',
exclude: 'web',
appender: 'recorder'
type: "categoryFilter",
exclude: "web",
appender: "recorder"
}
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } }
categories: { default: { appenders: ["filtered"], level: "DEBUG" } }
});

const webLogger = log4js.getLogger('web');
const appLogger = log4js.getLogger('app');
const webLogger = log4js.getLogger("web");
const appLogger = log4js.getLogger("app");

webLogger.debug('This should not get logged');
appLogger.debug('This should get logged');
webLogger.debug('Hello again');
log4js.getLogger('db').debug('This should be included by the appender anyway');
webLogger.debug("This should not get logged");
appLogger.debug("This should get logged");
webLogger.debug("Hello again");
log4js
.getLogger("db")
.debug("This should be included by the appender anyway");

const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(logEvents[1].data[0], 'This should be included by the appender anyway');
t.equal(logEvents[0].data[0], "This should get logged");
t.equal(
logEvents[1].data[0],
"This should be included by the appender anyway"
);
t.end();
});

batch.test('should not really need a category filter any more', (t) => {
batch.test("should not really need a category filter any more", t => {
log4js.configure({
appenders: { recorder: { type: 'recording' } },
appenders: { recorder: { type: "recording" } },
categories: {
default: { appenders: ['recorder'], level: 'DEBUG' },
web: { appenders: ['recorder'], level: 'OFF' }
default: { appenders: ["recorder"], level: "DEBUG" },
web: { appenders: ["recorder"], level: "OFF" }
}
});
const appLogger = log4js.getLogger('app');
const webLogger = log4js.getLogger('web');
const appLogger = log4js.getLogger("app");
const webLogger = log4js.getLogger("web");

webLogger.debug('This should not get logged');
appLogger.debug('This should get logged');
webLogger.debug('Hello again');
log4js.getLogger('db').debug('This should be included by the appender anyway');
webLogger.debug("This should not get logged");
appLogger.debug("This should get logged");
webLogger.debug("Hello again");
log4js
.getLogger("db")
.debug("This should be included by the appender anyway");

const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(logEvents[1].data[0], 'This should be included by the appender anyway');
t.equal(logEvents[0].data[0], "This should get logged");
t.equal(
logEvents[1].data[0],
"This should be included by the appender anyway"
);
t.end();
});

@@ -1,64 +1,62 @@
'use strict';

const test = require('tap').test;
const cluster = require('cluster');
const log4js = require('../../lib/log4js');
const recorder = require('../../lib/appenders/recording');
const { test } = require("tap");
const cluster = require("cluster");
const log4js = require("../../lib/log4js");
const recorder = require("../../lib/appenders/recording");

log4js.configure({
appenders: {
vcr: { type: 'recording' }
vcr: { type: "recording" }
},
categories: { default: { appenders: ['vcr'], level: 'debug' } }
categories: { default: { appenders: ["vcr"], level: "debug" } }
});

if (cluster.isMaster) {
cluster.fork();

const masterLogger = log4js.getLogger('master');
const masterLogger = log4js.getLogger("master");
const masterPid = process.pid;
masterLogger.info('this is master');
masterLogger.info("this is master");

let workerLevel;
cluster.on('message', (worker, message) => {
cluster.on("message", (worker, message) => {
if (worker.type || worker.topic) {
message = worker;
}
if (message.type && message.type === '::testing') {
if (message.type && message.type === "::testing") {
workerLevel = message.level;
}
});

cluster.on('exit', (worker) => {
cluster.on("exit", worker => {
const workerPid = worker.process.pid;
const logEvents = recorder.replay();

test('cluster master', (batch) => {
batch.test('events should be logged', (t) => {
test("cluster master", batch => {
batch.test("events should be logged", t => {
t.equal(logEvents.length, 3);

t.equal(logEvents[0].categoryName, 'master');
t.equal(logEvents[0].categoryName, "master");
t.equal(logEvents[0].pid, masterPid);

t.equal(logEvents[1].categoryName, 'worker');
t.equal(logEvents[1].categoryName, "worker");
t.equal(logEvents[1].pid, workerPid);
// serialising errors with stacks intact
t.type(logEvents[1].data[1], 'Error');
t.contains(logEvents[1].data[1].stack, 'Error: oh dear');
t.type(logEvents[1].data[1], "Error");
t.contains(logEvents[1].data[1].stack, "Error: oh dear");
// serialising circular references in objects
t.type(logEvents[1].data[2], 'object');
t.type(logEvents[1].data[2].me, 'object');
t.type(logEvents[1].data[2], "object");
t.type(logEvents[1].data[2].me, "object");
// serialising errors with custom properties
t.type(logEvents[1].data[3], 'Error');
t.contains(logEvents[1].data[3].stack, 'Error: wtf');
t.equal(logEvents[1].data[3].alert, 'chartreuse');
t.type(logEvents[1].data[3], "Error");
t.contains(logEvents[1].data[3].stack, "Error: wtf");
t.equal(logEvents[1].data[3].alert, "chartreuse");
// serialising things that are not errors, but look a bit like them
t.type(logEvents[1].data[4], 'object');
t.equal(logEvents[1].data[4].stack, 'this is not a stack trace');
t.type(logEvents[1].data[4], "object");
t.equal(logEvents[1].data[4].stack, "this is not a stack trace");

t.equal(logEvents[2].categoryName, 'log4js');
t.equal(logEvents[2].level.toString(), 'ERROR');
t.equal(logEvents[2].data[0], 'Unable to parse log:');
t.equal(logEvents[2].categoryName, "log4js");
t.equal(logEvents[2].level.toString(), "ERROR");
t.equal(logEvents[2].data[0], "Unable to parse log:");

t.end();
});
@@ -66,31 +64,37 @@ if (cluster.isMaster) {
batch.end();
});

test('cluster worker', (batch) => {
batch.test('logger should get correct config', (t) => {
t.equal(workerLevel, 'DEBUG');
test("cluster worker", batch => {
batch.test("logger should get correct config", t => {
t.equal(workerLevel, "DEBUG");
t.end();
});
batch.end();
});
});
} else {
const workerLogger = log4js.getLogger('worker');
const workerLogger = log4js.getLogger("worker");
// test for serialising circular references
const circle = {};
circle.me = circle;
// test for serialising errors with their own properties
const someError = new Error('wtf');
someError.alert = 'chartreuse';
const someError = new Error("wtf");
someError.alert = "chartreuse";
// test for serialising things that look like errors but aren't.
const notAnError = { stack: 'this is not a stack trace' };
workerLogger.info('this is worker', new Error('oh dear'), circle, someError, notAnError);
const notAnError = { stack: "this is not a stack trace" };
workerLogger.info(
"this is worker",
new Error("oh dear"),
circle,
someError,
notAnError
);
// can't run the test in the worker, things get weird
process.send({
type: '::testing',
type: "::testing",
level: workerLogger.level.toString()
});
// test sending a badly-formed log message
process.send({ topic: 'log4js:message', data: { cheese: 'gouda' } });
process.send({ topic: "log4js:message", data: { cheese: "gouda" } });
cluster.worker.disconnect();
}

@ -1,277 +1,327 @@
|
||||
'use strict';
|
||||
const { test } = require("tap");
|
||||
const log4js = require("../../lib/log4js");
|
||||
const categories = require("../../lib/categories");
|
||||
|
||||
const test = require('tap').test;
|
||||
const log4js = require('../../lib/log4js');
|
||||
const categories = require('../../lib/categories');
|
||||
|
||||
test('log4js category inherit all appenders from direct parent', (batch) => {
|
||||
batch.test('should inherit appenders from direct parent', (t) => {
|
||||
test("log4js category inherit all appenders from direct parent", batch => {
|
||||
batch.test("should inherit appenders from direct parent", t => {
|
||||
const config = {
|
||||
appenders: {
|
||||
stdout1: { type: 'dummy-appender', label: 'stdout1' },
|
||||
stdout2: { type: 'dummy-appender', label: 'stdout2' }
|
||||
stdout1: { type: "dummy-appender", label: "stdout1" },
|
||||
stdout2: { type: "dummy-appender", label: "stdout2" }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['stdout1'], level: 'ERROR' },
|
||||
catA: { appenders: ['stdout1', 'stdout2'], level: 'INFO' },
|
||||
'catA.catB': { level: 'DEBUG' }
|
||||
default: { appenders: ["stdout1"], level: "ERROR" },
|
||||
catA: { appenders: ["stdout1", "stdout2"], level: "INFO" },
|
||||
"catA.catB": { level: "DEBUG" }
|
||||
}
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
|
||||
const childCategoryName = 'catA.catB';
|
||||
const childCategoryName = "catA.catB";
|
||||
const childAppenders = categories.appendersForCategory(childCategoryName);
|
||||
const childLevel = categories.getLevelForCategory(childCategoryName);
|
||||
|
||||
t.ok(childAppenders);
|
||||
t.isEqual(childAppenders.length, 2, 'inherited 2 appenders');
|
||||
t.ok(childAppenders.some(a => a.label === 'stdout1'), 'inherited stdout1');
|
||||
t.ok(childAppenders.some(a => a.label === 'stdout2'), 'inherited stdout2');
|
||||
t.isEqual(childLevel.levelStr, 'DEBUG', 'child level overrides parent');
|
||||
t.isEqual(childAppenders.length, 2, "inherited 2 appenders");
|
||||
t.ok(childAppenders.some(a => a.label === "stdout1"), "inherited stdout1");
|
||||
t.ok(childAppenders.some(a => a.label === "stdout2"), "inherited stdout2");
|
||||
t.isEqual(childLevel.levelStr, "DEBUG", "child level overrides parent");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('multiple children should inherit config from shared parent', (t) => {
|
||||
batch.test(
|
||||
"multiple children should inherit config from shared parent",
|
||||
t => {
|
||||
const config = {
|
||||
appenders: {
|
||||
stdout1: { type: "dummy-appender", label: "stdout1" },
|
||||
stdout2: { type: "dummy-appender", label: "stdout2" }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ["stdout1"], level: "ERROR" },
|
||||
catA: { appenders: ["stdout1"], level: "INFO" },
|
||||
"catA.catB.cat1": { level: "DEBUG" }, // should get sdtout1, DEBUG
|
||||
"catA.catB.cat2": { appenders: ["stdout2"] } // should get sdtout1,sdtout2, INFO
|
||||
}
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
|
||||
const child1CategoryName = "catA.catB.cat1";
|
||||
const child1Appenders = categories.appendersForCategory(
|
||||
child1CategoryName
|
||||
);
|
||||
const child1Level = categories.getLevelForCategory(child1CategoryName);
|
||||
|
||||
t.isEqual(child1Appenders.length, 1, "inherited 1 appender");
|
||||
t.ok(
|
||||
child1Appenders.some(a => a.label === "stdout1"),
|
||||
"inherited stdout1"
|
||||
);
|
||||
t.isEqual(child1Level.levelStr, "DEBUG", "child level overrides parent");
|
||||
|
||||
const child2CategoryName = "catA.catB.cat2";
|
||||
const child2Appenders = categories.appendersForCategory(
|
||||
child2CategoryName
|
||||
);
|
||||
const child2Level = categories.getLevelForCategory(child2CategoryName);
|
||||
|
||||
t.ok(child2Appenders);
|
||||
t.isEqual(
|
||||
child2Appenders.length,
|
||||
2,
|
||||
"inherited 1 appenders, plus its original"
|
||||
);
|
||||
t.ok(
|
||||
child2Appenders.some(a => a.label === "stdout1"),
|
||||
"inherited stdout1"
|
||||
);
|
||||
t.ok(child2Appenders.some(a => a.label === "stdout2"), "kept stdout2");
|
||||
t.isEqual(child2Level.levelStr, "INFO", "inherited parent level");
|
||||
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.test("should inherit appenders from multiple parents", t => {
|
||||
const config = {
|
||||
appenders: {
|
||||
stdout1: { type: 'dummy-appender', label: 'stdout1' },
|
||||
stdout2: { type: 'dummy-appender', label: 'stdout2' }
|
||||
stdout1: { type: "dummy-appender", label: "stdout1" },
|
||||
stdout2: { type: "dummy-appender", label: "stdout2" }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['stdout1'], level: 'ERROR' },
|
||||
catA: { appenders: ['stdout1'], level: 'INFO' },
|
||||
'catA.catB.cat1': { level: 'DEBUG' }, // should get sdtout1, DEBUG
|
||||
'catA.catB.cat2': { appenders: ['stdout2'] } // should get sdtout1,sdtout2, INFO
|
||||
default: { appenders: ["stdout1"], level: "ERROR" },
|
||||
catA: { appenders: ["stdout1"], level: "INFO" },
|
||||
"catA.catB": { appenders: ["stdout2"], level: "INFO" }, // should get stdout1 and stdout2
|
||||
"catA.catB.catC": { level: "DEBUG" } // should get stdout1 and stdout2
|
||||
}
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
|
||||
const child1CategoryName = 'catA.catB.cat1';
|
||||
const child1Appenders = categories.appendersForCategory(child1CategoryName);
|
||||
const child1Level = categories.getLevelForCategory(child1CategoryName);
|
||||
|
||||
t.isEqual(child1Appenders.length, 1, 'inherited 1 appender');
|
||||
t.ok(child1Appenders.some(a => a.label === 'stdout1'), 'inherited stdout1');
|
||||
t.isEqual(child1Level.levelStr, 'DEBUG', 'child level overrides parent');
|
||||
|
||||
const child2CategoryName = 'catA.catB.cat2';
|
||||
const child2Appenders = categories.appendersForCategory(child2CategoryName);
|
||||
const child2Level = categories.getLevelForCategory(child2CategoryName);
|
||||
|
||||
t.ok(child2Appenders);
|
||||
t.isEqual(child2Appenders.length, 2, 'inherited 1 appenders, plus its original');
|
||||
t.ok(child2Appenders.some(a => a.label === 'stdout1'), 'inherited stdout1');
|
||||
t.ok(child2Appenders.some(a => a.label === 'stdout2'), 'kept stdout2');
|
||||
t.isEqual(child2Level.levelStr, 'INFO', 'inherited parent level');
|
||||
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should inherit appenders from multiple parents', (t) => {
|
||||
const config = {
|
||||
appenders: {
|
||||
stdout1: { type: 'dummy-appender', label: 'stdout1' },
|
||||
stdout2: { type: 'dummy-appender', label: 'stdout2' }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['stdout1'], level: 'ERROR' },
|
||||
catA: { appenders: ['stdout1'], level: 'INFO' },
|
||||
'catA.catB': { appenders: ['stdout2'], level: 'INFO' }, // should get stdout1 and stdout2
|
||||
'catA.catB.catC': { level: 'DEBUG' } // should get stdout1 and stdout2
|
||||
}
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
|
||||
const childCategoryName = 'catA.catB.catC';
|
||||
const childCategoryName = "catA.catB.catC";
|
||||
const childAppenders = categories.appendersForCategory(childCategoryName);
|
||||
|
||||
t.ok(childAppenders);
|
||||
t.isEqual(childAppenders.length, 2, 'inherited 2 appenders');
|
||||
t.ok(childAppenders.some(a => a.label === 'stdout1'), 'inherited stdout1');
|
||||
t.ok(childAppenders.some(a => a.label === 'stdout1'), 'inherited stdout1');
|
||||
t.isEqual(childAppenders.length, 2, "inherited 2 appenders");
|
||||
t.ok(childAppenders.some(a => a.label === "stdout1"), "inherited stdout1");
|
||||
t.ok(childAppenders.some(a => a.label === "stdout1"), "inherited stdout1");
|
||||
|
||||
const firstParentName = 'catA.catB';
|
||||
const firstParentAppenders = categories.appendersForCategory(firstParentName);
|
||||
const firstParentName = "catA.catB";
|
||||
const firstParentAppenders = categories.appendersForCategory(
|
||||
firstParentName
|
||||
);
|
||||
|
||||
t.ok(firstParentAppenders);
|
||||
t.isEqual(firstParentAppenders.length, 2, 'ended up with 2 appenders');
|
||||
t.ok(firstParentAppenders.some(a => a.label === 'stdout1'), 'inherited stdout1');
|
||||
t.ok(firstParentAppenders.some(a => a.label === 'stdout2'), 'kept stdout2');
|
||||
t.isEqual(firstParentAppenders.length, 2, "ended up with 2 appenders");
|
||||
t.ok(
|
||||
firstParentAppenders.some(a => a.label === "stdout1"),
|
||||
"inherited stdout1"
|
||||
);
|
||||
t.ok(firstParentAppenders.some(a => a.label === "stdout2"), "kept stdout2");
|
||||
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should inherit appenders from deep parent with missing direct parent', (t) => {
|
||||
batch.test(
|
||||
"should inherit appenders from deep parent with missing direct parent",
|
||||
t => {
|
||||
const config = {
|
||||
appenders: {
|
||||
stdout1: { type: "dummy-appender", label: "stdout1" },
|
||||
stdout2: { type: "dummy-appender", label: "stdout2" }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ["stdout1"], level: "ERROR" },
|
||||
catA: { appenders: ["stdout1"], level: "INFO" },
|
||||
// no catA.catB, but should get created, with stdout1
|
||||
"catA.catB.catC": { level: "DEBUG" } // should get stdout1
|
||||
}
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
|
||||
const childCategoryName = "catA.catB.catC";
|
||||
const childAppenders = categories.appendersForCategory(childCategoryName);
|
||||
|
||||
t.ok(childAppenders);
|
||||
t.isEqual(childAppenders.length, 1, "inherited 1 appender");
|
||||
t.ok(
|
||||
childAppenders.some(a => a.label === "stdout1"),
|
||||
"inherited stdout1"
|
||||
);
|
||||
|
||||
const firstParentCategoryName = "catA.catB";
|
||||
const firstParentAppenders = categories.appendersForCategory(
|
||||
firstParentCategoryName
|
||||
);
|
||||
|
||||
t.ok(firstParentAppenders, "catA.catB got created implicitily");
|
||||
t.isEqual(
|
||||
firstParentAppenders.length,
|
||||
1,
|
||||
"created with 1 inherited appender"
|
||||
);
|
||||
t.ok(
|
||||
firstParentAppenders.some(a => a.label === "stdout1"),
|
||||
"inherited stdout1"
|
||||
);
|
||||
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.test("should deal gracefully with missing parent", t => {
|
||||
const config = {
|
||||
appenders: {
|
||||
stdout1: { type: 'dummy-appender', label: 'stdout1' },
|
||||
stdout2: { type: 'dummy-appender', label: 'stdout2' }
|
||||
stdout1: { type: "dummy-appender", label: "stdout1" },
|
||||
stdout2: { type: "dummy-appender", label: "stdout2" }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['stdout1'], level: 'ERROR' },
|
||||
catA: { appenders: ['stdout1'], level: 'INFO' },
|
||||
// no catA.catB, but should get created, with stdout1
|
||||
'catA.catB.catC': { level: 'DEBUG' } // should get stdout1
|
||||
}
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
|
||||
const childCategoryName = 'catA.catB.catC';
|
||||
const childAppenders = categories.appendersForCategory(childCategoryName);
|
||||
|
||||
t.ok(childAppenders);
|
||||
t.isEqual(childAppenders.length, 1, 'inherited 1 appender');
|
||||
t.ok(childAppenders.some(a => a.label === 'stdout1'), 'inherited stdout1');
|
||||
|
||||
const firstParentCategoryName = 'catA.catB';
|
||||
const firstParentAppenders = categories.appendersForCategory(firstParentCategoryName);
|
||||
|
||||
t.ok(firstParentAppenders, 'catA.catB got created implicitly');
|
||||
t.isEqual(firstParentAppenders.length, 1, 'created with 1 inherited appender');
|
||||
t.ok(firstParentAppenders.some(a => a.label === 'stdout1'), 'inherited stdout1');
|
||||
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should deal gracefully with missing parent', (t) => {
|
||||
const config = {
|
||||
appenders: {
|
||||
stdout1: { type: 'dummy-appender', label: 'stdout1' },
|
||||
stdout2: { type: 'dummy-appender', label: 'stdout2' }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['stdout1'], level: 'ERROR' },
|
||||
default: { appenders: ["stdout1"], level: "ERROR" },
|
||||
// no catA nor catA.catB, but should get created, with default values
|
||||
'catA.catB.catC': { appenders: ['stdout2'], level: 'DEBUG' } // should get stdout2, DEBUG
|
||||
"catA.catB.catC": { appenders: ["stdout2"], level: "DEBUG" } // should get stdout2, DEBUG
|
||||
}
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
|
||||
const childCategoryName = 'catA.catB.catC';
|
||||
const childCategoryName = "catA.catB.catC";
|
||||
const childAppenders = categories.appendersForCategory(childCategoryName);
|
||||
|
||||
t.ok(childAppenders);
|
||||
t.isEqual(childAppenders.length, 1);
|
||||
t.ok(childAppenders.some(a => a.label === 'stdout2'));
|
||||
t.ok(childAppenders.some(a => a.label === "stdout2"));
|
||||
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test(
|
||||
"should not get duplicate appenders if parent has the same one",
|
||||
t => {
|
||||
const config = {
|
||||
appenders: {
|
||||
stdout1: { type: "dummy-appender", label: "stdout1" },
|
||||
stdout2: { type: "dummy-appender", label: "stdout2" }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ["stdout1"], level: "ERROR" },
|
||||
catA: { appenders: ["stdout1", "stdout2"], level: "INFO" },
|
||||
"catA.catB": { appenders: ["stdout1"], level: "DEBUG" }
|
||||
}
|
||||
};
|
||||
|
||||
batch.test('should not get duplicate appenders if parent has the same one', (t) => {
|
||||
log4js.configure(config);
|
||||
|
||||
const childCategoryName = "catA.catB";
|
||||
const childAppenders = categories.appendersForCategory(childCategoryName);
|
||||
|
||||
t.ok(childAppenders);
|
||||
t.isEqual(childAppenders.length, 2, "inherited 1 appender");
|
||||
t.ok(
|
||||
childAppenders.some(a => a.label === "stdout1"),
|
||||
"still have stdout1"
|
||||
);
|
||||
t.ok(
|
||||
childAppenders.some(a => a.label === "stdout2"),
|
||||
"inherited stdout2"
|
||||
);
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.test("inherit:falses should disable inheritance", t => {
|
||||
const config = {
|
||||
appenders: {
|
||||
stdout1: { type: 'dummy-appender', label: 'stdout1' },
|
||||
stdout2: { type: 'dummy-appender', label: 'stdout2' }
|
||||
stdout1: { type: "dummy-appender", label: "stdout1" },
|
||||
stdout2: { type: "dummy-appender", label: "stdout2" }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['stdout1'], level: 'ERROR' },
|
||||
catA: { appenders: ['stdout1', 'stdout2'], level: 'INFO' },
|
||||
'catA.catB': { appenders: ['stdout1'], level: 'DEBUG' }
|
||||
default: { appenders: ["stdout1"], level: "ERROR" },
|
||||
catA: { appenders: ["stdout1"], level: "INFO" },
|
||||
"catA.catB": { appenders: ["stdout2"], level: "INFO", inherit: false } // should not inherit from catA
|
||||
}
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
|
||||
const childCategoryName = 'catA.catB';
|
||||
const childCategoryName = "catA.catB";
|
||||
const childAppenders = categories.appendersForCategory(childCategoryName);
|
||||
|
||||
t.ok(childAppenders);
|
||||
t.isEqual(childAppenders.length, 2, 'inherited 1 appender');
|
||||
t.ok(childAppenders.some(a => a.label === 'stdout1'), 'still have stdout1');
|
||||
t.ok(childAppenders.some(a => a.label === 'stdout2'), 'inherited stdout2');
|
||||
t.isEqual(childAppenders.length, 1, "inherited no appender");
|
||||
t.ok(childAppenders.some(a => a.label === "stdout2"), "kept stdout2");
|
||||
|
||||
t.end();
|
||||
});
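
// Minimal sketch of the inherit flag the test above covers: a child category that
// opts out of picking up its parent's appenders. Appender types are illustrative;
// the config shape mirrors the one used in these tests.
const log4js = require('log4js');

log4js.configure({
  appenders: {
    out: { type: 'stdout' },
    errs: { type: 'stderr' }
  },
  categories: {
    default: { appenders: ['out'], level: 'error' },
    catA: { appenders: ['out'], level: 'info' },
    // inherit: false keeps catA.catB limited to its own appender list.
    'catA.catB': { appenders: ['errs'], level: 'info', inherit: false }
  }
});
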
batch.test('inherit:false should disable inheritance', (t) => {
|
||||
batch.test("inheritance should stop if direct parent has inherit off", t => {
|
||||
const config = {
|
||||
appenders: {
|
||||
stdout1: { type: 'dummy-appender', label: 'stdout1' },
|
||||
stdout2: { type: 'dummy-appender', label: 'stdout2' }
|
||||
stdout1: { type: "dummy-appender", label: "stdout1" },
|
||||
stdout2: { type: "dummy-appender", label: "stdout2" }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['stdout1'], level: 'ERROR' },
|
||||
catA: { appenders: ['stdout1'], level: 'INFO' },
|
||||
'catA.catB': { appenders: ['stdout2'], level: 'INFO', inherit: false }, // should not inherit from catA
|
||||
default: { appenders: ["stdout1"], level: "ERROR" },
|
||||
catA: { appenders: ["stdout1"], level: "INFO" },
|
||||
"catA.catB": { appenders: ["stdout2"], level: "INFO", inherit: false }, // should not inherit from catA
|
||||
"catA.catB.catC": { level: "DEBUG" } // should inherit from catB only
|
||||
}
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
|
||||
const childCategoryName = 'catA.catB';
|
||||
const childCategoryName = "catA.catB.catC";
|
||||
const childAppenders = categories.appendersForCategory(childCategoryName);
|
||||
|
||||
t.ok(childAppenders);
|
||||
t.isEqual(childAppenders.length, 1, 'inherited no appender');
|
||||
t.ok(childAppenders.some(a => a.label === 'stdout2'), 'kept stdout2');
|
||||
t.isEqual(childAppenders.length, 1, "inherited 1 appender");
|
||||
t.ok(childAppenders.some(a => a.label === "stdout2"), "inherited stdout2");
|
||||
|
||||
t.end();
|
||||
});
|
||||
|
||||
|
||||
batch.test('inheritance should stop if direct parent has inherit off', (t) => {
|
||||
const config = {
|
||||
appenders: {
|
||||
stdout1: { type: 'dummy-appender', label: 'stdout1' },
|
||||
stdout2: { type: 'dummy-appender', label: 'stdout2' }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['stdout1'], level: 'ERROR' },
|
||||
catA: { appenders: ['stdout1'], level: 'INFO' },
|
||||
'catA.catB': { appenders: ['stdout2'], level: 'INFO', inherit: false }, // should not inherit from catA
|
||||
'catA.catB.catC': { level: 'DEBUG' } // should inherit from catB only
|
||||
}
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
|
||||
const childCategoryName = 'catA.catB.catC';
|
||||
const childAppenders = categories.appendersForCategory(childCategoryName);
|
||||
|
||||
t.ok(childAppenders);
|
||||
t.isEqual(childAppenders.length, 1, 'inherited 1 appender');
|
||||
t.ok(childAppenders.some(a => a.label === 'stdout2'), 'inherited stdout2');
|
||||
|
||||
const firstParentCategoryName = 'catA.catB';
|
||||
const firstParentAppenders = categories.appendersForCategory(firstParentCategoryName);
|
||||
const firstParentCategoryName = "catA.catB";
|
||||
const firstParentAppenders = categories.appendersForCategory(
|
||||
firstParentCategoryName
|
||||
);
|
||||
|
||||
t.ok(firstParentAppenders);
|
||||
t.isEqual(firstParentAppenders.length, 1, 'did not inherit new appenders');
|
||||
t.ok(firstParentAppenders.some(a => a.label === 'stdout2'), 'kept stdout2');
|
||||
t.isEqual(firstParentAppenders.length, 1, "did not inherit new appenders");
|
||||
t.ok(firstParentAppenders.some(a => a.label === "stdout2"), "kept stdout2");
|
||||
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should inherit level when it is missing', (t) => {
|
||||
batch.test("should inherit level when it is missing", t => {
|
||||
const config = {
|
||||
appenders: {
|
||||
stdout1: { type: 'dummy-appender', label: 'stdout1' },
|
||||
stdout2: { type: 'dummy-appender', label: 'stdout2' }
|
||||
stdout1: { type: "dummy-appender", label: "stdout1" },
|
||||
stdout2: { type: "dummy-appender", label: "stdout2" }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['stdout1'], level: 'ERROR' },
|
||||
catA: { appenders: ['stdout1'], level: 'INFO' },
|
||||
default: { appenders: ["stdout1"], level: "ERROR" },
|
||||
catA: { appenders: ["stdout1"], level: "INFO" },
|
||||
// no catA.catB, but should get created, with stdout1, level INFO
|
||||
'catA.catB.catC': {} // should get stdout1, level INFO
|
||||
"catA.catB.catC": {} // should get stdout1, level INFO
|
||||
}
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
|
||||
const childCategoryName = 'catA.catB.catC';
|
||||
const childCategoryName = "catA.catB.catC";
|
||||
const childLevel = categories.getLevelForCategory(childCategoryName);
|
||||
|
||||
t.isEqual(childLevel.levelStr, 'INFO', 'inherited level');
|
||||
t.isEqual(childLevel.levelStr, "INFO", "inherited level");
|
||||
|
||||
const firstParentCategoryName = 'catA.catB';
|
||||
const firstParentLevel = categories.getLevelForCategory(firstParentCategoryName);
|
||||
const firstParentCategoryName = "catA.catB";
|
||||
const firstParentLevel = categories.getLevelForCategory(
|
||||
firstParentCategoryName
|
||||
);
|
||||
|
||||
t.isEqual(firstParentLevel.levelStr, 'INFO', 'generate parent inherited level from base');
|
||||
t.isEqual(
|
||||
firstParentLevel.levelStr,
|
||||
"INFO",
|
||||
"generate parent inherited level from base"
|
||||
);
|
||||
|
||||
t.end();
|
||||
});
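
// Sketch of checking the level a child category ends up with, assuming the logger
// exposes its effective level via the `level` property (the tests above read it
// through lib/categories instead). Names are illustrative.
const log4js = require('log4js');

log4js.configure({
  appenders: { out: { type: 'stdout' } },
  categories: {
    default: { appenders: ['out'], level: 'error' },
    catA: { appenders: ['out'], level: 'info' }
  }
});

const child = log4js.getLogger('catA.catB.catC');
// With no level of its own, the child should report the inherited INFO level.
console.log(child.level.toString());
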
@ -1,18 +1,16 @@
'use strict';
const { test } = require("tap");
const sandbox = require("@log4js-node/sandboxed-module");
const realFS = require("fs");

const test = require('tap').test;
const sandbox = require('@log4js-node/sandboxed-module');
const realFS = require('fs');

const modulePath = 'some/path/to/mylog4js.json';
const modulePath = "some/path/to/mylog4js.json";
const pathsChecked = [];

let fakeFS = {};
let dependencies;
let fileRead;

test('log4js configure', (batch) => {
|
||||
batch.beforeEach((done) => {
|
||||
test("log4js configure", batch => {
|
||||
batch.beforeEach(done => {
|
||||
fileRead = 0;
|
||||
|
||||
fakeFS = {
|
||||
@ -23,31 +21,31 @@ test('log4js configure', (batch) => {
|
||||
config: {
|
||||
appenders: {
|
||||
console: {
|
||||
type: 'console',
|
||||
layout: { type: 'messagePassThrough' }
|
||||
type: "console",
|
||||
layout: { type: "messagePassThrough" }
|
||||
}
|
||||
},
|
||||
categories: {
|
||||
default: {
|
||||
appenders: ['console'],
|
||||
level: 'INFO'
|
||||
appenders: ["console"],
|
||||
level: "INFO"
|
||||
}
|
||||
}
|
||||
},
|
||||
readdirSync: dir => require('fs').readdirSync(dir),
|
||||
readdirSync: dir => require("fs").readdirSync(dir),
|
||||
readFileSync: (file, encoding) => {
|
||||
fileRead += 1;
|
||||
batch.type(file, 'string');
|
||||
batch.type(file, "string");
|
||||
batch.equal(file, modulePath);
|
||||
batch.equal(encoding, 'utf8');
|
||||
batch.equal(encoding, "utf8");
|
||||
return JSON.stringify(fakeFS.config);
|
||||
},
|
||||
statSync: (path) => {
|
||||
statSync: path => {
|
||||
pathsChecked.push(path);
|
||||
if (path === modulePath) {
|
||||
return { mtime: new Date() };
|
||||
}
|
||||
throw new Error('no such file');
|
||||
throw new Error("no such file");
|
||||
}
|
||||
};
|
||||
|
||||
@ -60,29 +58,45 @@ test('log4js configure', (batch) => {
|
||||
done();
|
||||
});
|
||||
|
||||
batch.test('when configuration file loaded via LOG4JS_CONFIG env variable', (t) => {
|
||||
process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
|
||||
batch.test(
|
||||
"when configuration file loaded via LOG4JS_CONFIG env variable",
|
||||
t => {
|
||||
process.env.LOG4JS_CONFIG = "some/path/to/mylog4js.json";
|
||||
|
||||
const log4js = sandbox.require('../../lib/log4js', dependencies);
|
||||
const log4js = sandbox.require("../../lib/log4js", dependencies);
|
||||
|
||||
log4js.getLogger('test-logger');
|
||||
t.equal(fileRead, 1, 'should load the specified local config file');
|
||||
log4js.getLogger("test-logger");
|
||||
t.equal(fileRead, 1, "should load the specified local config file");
|
||||
|
||||
delete process.env.LOG4JS_CONFIG;
|
||||
delete process.env.LOG4JS_CONFIG;
|
||||
|
||||
t.end();
|
||||
});
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.test('when configuration is set via configure() method call, return the log4js object', (t) => {
|
||||
const log4js = sandbox.require('../../lib/log4js', dependencies).configure(fakeFS.config);
|
||||
t.type(log4js, 'object', 'Configure method call should return the log4js object!');
|
||||
batch.test(
|
||||
"when configuration is set via configure() method call, return the log4js object",
|
||||
t => {
|
||||
const log4js = sandbox
|
||||
.require("../../lib/log4js", dependencies)
|
||||
.configure(fakeFS.config);
|
||||
t.type(
|
||||
log4js,
|
||||
"object",
|
||||
"Configure method call should return the log4js object!"
|
||||
);
|
||||
|
||||
const log = log4js.getLogger('daemon');
|
||||
t.type(log, 'object', 'log4js object, returned by configure(...) method should be able to create log object.');
|
||||
t.type(log.info, 'function');
|
||||
const log = log4js.getLogger("daemon");
|
||||
t.type(
|
||||
log,
|
||||
"object",
|
||||
"log4js object, returned by configure(...) method should be able to create log object."
|
||||
);
|
||||
t.type(log.info, "function");
|
||||
|
||||
t.end();
|
||||
});
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.end();
|
||||
});
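
// Sketch of the LOG4JS_CONFIG behaviour tested above: if the variable is set
// before log4js is first used, the named file is loaded as the configuration.
// The path here is a placeholder.
process.env.LOG4JS_CONFIG = '/path/to/my-log4js.json';

const log4js = require('log4js');
// The first getLogger() call triggers loading of the file named above.
log4js.getLogger('app').info('configured from LOG4JS_CONFIG');
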
@ -1,43 +1,44 @@
|
||||
'use strict';
|
||||
|
||||
const test = require('tap').test;
|
||||
const util = require('util');
|
||||
const path = require('path');
|
||||
const sandbox = require('@log4js-node/sandboxed-module');
|
||||
const debug = require('debug')('log4js:test.configuration-validation');
|
||||
const deepFreeze = require('deep-freeze');
|
||||
const log4js = require('../../lib/log4js');
|
||||
const configuration = require('../../lib/configuration');
|
||||
const { test } = require("tap");
|
||||
const util = require("util");
|
||||
const path = require("path");
|
||||
const sandbox = require("@log4js-node/sandboxed-module");
|
||||
const debug = require("debug")("log4js:test.configuration-validation");
|
||||
const deepFreeze = require("deep-freeze");
|
||||
const log4js = require("../../lib/log4js");
|
||||
const configuration = require("../../lib/configuration");
|
||||
|
||||
const testAppender = (label, result) => ({
|
||||
configure: function (config, layouts, findAppender) {
|
||||
debug(`testAppender(${label}).configure called, with config: ${util.inspect(config)}`);
|
||||
configure(config, layouts, findAppender) {
|
||||
debug(
|
||||
`testAppender(${label}).configure called, with config: ${util.inspect(
|
||||
config
|
||||
)}`
|
||||
);
|
||||
result.configureCalled = true;
|
||||
result.type = config.type;
|
||||
result.label = label;
|
||||
result.config = config;
|
||||
result.layouts = layouts;
|
||||
result.findAppender = findAppender;
|
||||
return { };
|
||||
return {};
|
||||
}
|
||||
});
|
||||
|
||||
test('log4js configuration validation', (batch) => {
|
||||
batch.test('should give error if config is just plain silly', (t) => {
|
||||
[null, undefined, '', ' ', []].forEach((config) => {
|
||||
test("log4js configuration validation", batch => {
|
||||
batch.test("should give error if config is just plain silly", t => {
|
||||
[null, undefined, "", " ", []].forEach(config => {
|
||||
const expectedError = new Error(
|
||||
`Problem with log4js configuration: (${util.inspect(config)}) - must be an object.`
|
||||
);
|
||||
t.throws(
|
||||
() => configuration.configure(config),
|
||||
expectedError
|
||||
`Problem with log4js configuration: (${util.inspect(
|
||||
config
|
||||
)}) - must be an object.`
|
||||
);
|
||||
t.throws(() => configuration.configure(config), expectedError);
|
||||
});
|
||||
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should give error if config is an empty object', (t) => {
|
||||
batch.test("should give error if config is an empty object", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({}),
|
||||
'- must have a property "appenders" of type object.'
|
||||
@ -45,7 +46,7 @@ test('log4js configuration validation', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should give error if config has no appenders', (t) => {
|
||||
batch.test("should give error if config has no appenders", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({ categories: {} }),
|
||||
'- must have a property "appenders" of type object.'
|
||||
@ -53,15 +54,15 @@ test('log4js configuration validation', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should give error if config has no categories', (t) => {
|
||||
batch.test("should give error if config has no categories", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({ appenders: { out: { type: 'stdout' } } }),
|
||||
() => log4js.configure({ appenders: { out: { type: "stdout" } } }),
|
||||
'- must have a property "categories" of type object.'
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should give error if appenders is not an object', (t) => {
|
||||
batch.test("should give error if appenders is not an object", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({ appenders: [], categories: [] }),
|
||||
'- must have a property "appenders" of type object.'
|
||||
@ -69,293 +70,342 @@ test('log4js configuration validation', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should give error if appenders are not all valid', (t) => {
|
||||
batch.test("should give error if appenders are not all valid", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({ appenders: { thing: 'cheese' }, categories: {} }),
|
||||
() =>
|
||||
log4js.configure({ appenders: { thing: "cheese" }, categories: {} }),
|
||||
'- appender "thing" is not valid (must be an object with property "type")'
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should require at least one appender', (t) => {
|
||||
batch.test("should require at least one appender", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({ appenders: {}, categories: {} }),
|
||||
'- must define at least one appender.'
|
||||
"- must define at least one appender."
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should give error if categories are not all valid', (t) => {
|
||||
batch.test("should give error if categories are not all valid", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({ appenders: { stdout: { type: 'stdout' } }, categories: { thing: 'cheese' } }),
|
||||
() =>
|
||||
log4js.configure({
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { thing: "cheese" }
|
||||
}),
|
||||
'- category "thing" is not valid (must be an object with properties "appenders" and "level")'
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should give error if default category not defined', (t) => {
|
||||
batch.test("should give error if default category not defined", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { thing: { appenders: ['stdout'], level: 'ERROR' } }
|
||||
}),
|
||||
() =>
|
||||
log4js.configure({
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { thing: { appenders: ["stdout"], level: "ERROR" } }
|
||||
}),
|
||||
'- must define a "default" category.'
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should require at least one category', (t) => {
|
||||
batch.test("should require at least one category", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({ appenders: { stdout: { type: 'stdout' } }, categories: {} }),
|
||||
'- must define at least one category.'
|
||||
() =>
|
||||
log4js.configure({
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: {}
|
||||
}),
|
||||
"- must define at least one category."
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should give error if category.appenders is not an array', (t) => {
|
||||
batch.test("should give error if category.appenders is not an array", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { thing: { appenders: {}, level: 'ERROR' } }
|
||||
}),
|
||||
() =>
|
||||
log4js.configure({
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { thing: { appenders: {}, level: "ERROR" } }
|
||||
}),
|
||||
'- category "thing" is not valid (appenders must be an array of appender names)'
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should give error if category.appenders is empty', (t) => {
|
||||
batch.test("should give error if category.appenders is empty", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { thing: { appenders: [], level: 'ERROR' } }
|
||||
}),
|
||||
() =>
|
||||
log4js.configure({
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { thing: { appenders: [], level: "ERROR" } }
|
||||
}),
|
||||
'- category "thing" is not valid (appenders must contain at least one appender name)'
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should give error if categories do not refer to valid appenders', (t) => {
|
||||
t.throws(
|
||||
() => log4js.configure({
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { thing: { appenders: ['cheese'], level: 'ERROR' } }
|
||||
}),
|
||||
'- category "thing" is not valid (appender "cheese" is not defined)'
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
batch.test(
|
||||
"should give error if categories do not refer to valid appenders",
|
||||
t => {
|
||||
t.throws(
|
||||
() =>
|
||||
log4js.configure({
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { thing: { appenders: ["cheese"], level: "ERROR" } }
|
||||
}),
|
||||
'- category "thing" is not valid (appender "cheese" is not defined)'
|
||||
);
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.test('should give error if category level is not valid', (t) => {
|
||||
batch.test("should give error if category level is not valid", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'Biscuits' } }
|
||||
}),
|
||||
() =>
|
||||
log4js.configure({
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "Biscuits" } }
|
||||
}),
|
||||
'- category "default" is not valid (level "Biscuits" not recognised; valid levels are ALL, TRACE'
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should give error if category enableCallStack is not valid', (t) => {
|
||||
t.throws(
|
||||
() => log4js.configure({
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'Debug', enableCallStack: '123' } }
|
||||
}),
|
||||
'- category "default" is not valid (enableCallStack must be boolean type)'
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
batch.test(
|
||||
"should give error if category enableCallStack is not valid",
|
||||
t => {
|
||||
t.throws(
|
||||
() =>
|
||||
log4js.configure({
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: {
|
||||
default: {
|
||||
appenders: ["stdout"],
|
||||
level: "Debug",
|
||||
enableCallStack: "123"
|
||||
}
|
||||
}
|
||||
}),
|
||||
'- category "default" is not valid (enableCallStack must be boolean type)'
|
||||
);
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.test('should give error if appender type cannot be found', (t) => {
|
||||
batch.test("should give error if appender type cannot be found", t => {
|
||||
t.throws(
|
||||
() => log4js.configure({
|
||||
appenders: { thing: { type: 'cheese' } },
|
||||
categories: { default: { appenders: ['thing'], level: 'ERROR' } }
|
||||
}),
|
||||
() =>
|
||||
log4js.configure({
|
||||
appenders: { thing: { type: "cheese" } },
|
||||
categories: { default: { appenders: ["thing"], level: "ERROR" } }
|
||||
}),
|
||||
'- appender "thing" is not valid (type "cheese" could not be found)'
|
||||
);
|
||||
t.end();
|
||||
});
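
// Sketch showing how the validation errors exercised above surface to callers:
// configure() throws, so a caller can catch and report the message. The config
// here is deliberately invalid.
const log4js = require('log4js');

try {
  log4js.configure({ appenders: {}, categories: {} });
} catch (e) {
  // e.message explains the problem, e.g. that at least one appender must be defined.
  console.error('log4js configuration rejected:', e.message);
}
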
batch.test('should create appender instances', (t) => {
|
||||
batch.test("should create appender instances", t => {
|
||||
const thing = {};
|
||||
const sandboxedLog4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
cheese: testAppender('cheesy', thing)
|
||||
},
|
||||
ignoreMissing: true
|
||||
}
|
||||
);
|
||||
|
||||
sandboxedLog4js.configure({
|
||||
appenders: { thing: { type: 'cheese' } },
|
||||
categories: { default: { appenders: ['thing'], level: 'ERROR' } }
|
||||
});
|
||||
|
||||
t.ok(thing.configureCalled);
|
||||
t.equal(thing.type, 'cheese');
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should use provided appender instance if instance provided', (t) => {
|
||||
const thing = {};
|
||||
const cheese = testAppender('cheesy', thing);
|
||||
const sandboxedLog4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
ignoreMissing: true
|
||||
}
|
||||
);
|
||||
|
||||
sandboxedLog4js.configure({
|
||||
appenders: { thing: { type: cheese } },
|
||||
categories: { default: { appenders: ['thing'], level: 'ERROR' } }
|
||||
});
|
||||
|
||||
t.ok(thing.configureCalled);
|
||||
t.same(thing.type, cheese);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should not throw error if configure object is frozen', (t) => {
|
||||
t.doesNotThrow(() => log4js.configure(deepFreeze({
|
||||
appenders: {
|
||||
dateFile: {
|
||||
type: 'dateFile', filename: 'test/tap/freeze-date-file-test', alwaysIncludePattern: false
|
||||
}
|
||||
const sandboxedLog4js = sandbox.require("../../lib/log4js", {
|
||||
requires: {
|
||||
cheese: testAppender("cheesy", thing)
|
||||
},
|
||||
categories: { default: { appenders: ['dateFile'], level: log4js.levels.ERROR } }
|
||||
})));
|
||||
ignoreMissing: true
|
||||
});
|
||||
|
||||
sandboxedLog4js.configure({
|
||||
appenders: { thing: { type: "cheese" } },
|
||||
categories: { default: { appenders: ["thing"], level: "ERROR" } }
|
||||
});
|
||||
|
||||
t.ok(thing.configureCalled);
|
||||
t.equal(thing.type, "cheese");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should load appenders from core first', (t) => {
|
||||
const result = {};
|
||||
const sandboxedLog4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./cheese': testAppender('correct', result),
|
||||
cheese: testAppender('wrong', result)
|
||||
},
|
||||
batch.test(
|
||||
"should use provided appender instance if instance provided",
|
||||
t => {
|
||||
const thing = {};
|
||||
const cheese = testAppender("cheesy", thing);
|
||||
const sandboxedLog4js = sandbox.require("../../lib/log4js", {
|
||||
ignoreMissing: true
|
||||
}
|
||||
});
|
||||
|
||||
sandboxedLog4js.configure({
|
||||
appenders: { thing: { type: cheese } },
|
||||
categories: { default: { appenders: ["thing"], level: "ERROR" } }
|
||||
});
|
||||
|
||||
t.ok(thing.configureCalled);
|
||||
t.same(thing.type, cheese);
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.test("should not throw error if configure object is freezed", t => {
|
||||
t.doesNotThrow(() =>
|
||||
log4js.configure(
|
||||
deepFreeze({
|
||||
appenders: {
|
||||
dateFile: {
|
||||
type: "dateFile",
|
||||
filename: "test/tap/freeze-date-file-test",
|
||||
alwaysIncludePattern: false
|
||||
}
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ["dateFile"], level: log4js.levels.ERROR }
|
||||
}
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
sandboxedLog4js.configure({
|
||||
appenders: { thing: { type: 'cheese' } },
|
||||
categories: { default: { appenders: ['thing'], level: 'ERROR' } }
|
||||
});
|
||||
|
||||
t.ok(result.configureCalled);
|
||||
t.equal(result.type, 'cheese');
|
||||
t.equal(result.label, 'correct');
|
||||
t.end();
|
||||
});
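
// Sketch of the appender-module contract the loading tests above rely on: a module
// that exports a configure() function and returns the appender function. The file
// name and layout usage are illustrative.
// my-cheese-appender.js
function configure(config, layouts) {
  const layout = layouts.basicLayout;
  return (loggingEvent) => {
    // Write the formatted event wherever this appender sends its output.
    process.stdout.write(`${layout(loggingEvent)}\n`);
  };
}

module.exports.configure = configure;
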
batch.test('should load appenders relative to main file if not in core, or node_modules', (t) => {
|
||||
batch.test("should load appenders from core first", t => {
|
||||
const result = {};
|
||||
const mainPath = path.dirname(require.main.filename);
|
||||
const sandboxConfig = {
|
||||
ignoreMissing: true,
|
||||
requires: {}
|
||||
};
|
||||
sandboxConfig.requires[`${mainPath}/cheese`] = testAppender('correct', result);
|
||||
// add this one, because when we're running coverage the main path is a bit different
|
||||
sandboxConfig.requires[
|
||||
`${path.join(mainPath, '../../node_modules/nyc/bin/cheese')}`
|
||||
] = testAppender('correct', result);
|
||||
// in node v6, there's an extra layer of node modules for some reason, so add this one to work around it
|
||||
sandboxConfig.requires[
|
||||
`${path.join(mainPath, '../../node_modules/tap/node_modules/nyc/bin/cheese')}`
|
||||
] = testAppender('correct', result);
|
||||
|
||||
const sandboxedLog4js = sandbox.require('../../lib/log4js', sandboxConfig);
|
||||
const sandboxedLog4js = sandbox.require("../../lib/log4js", {
|
||||
requires: {
|
||||
"./cheese": testAppender("correct", result),
|
||||
cheese: testAppender("wrong", result)
|
||||
},
|
||||
ignoreMissing: true
|
||||
});
|
||||
|
||||
sandboxedLog4js.configure({
|
||||
appenders: { thing: { type: 'cheese' } },
|
||||
categories: { default: { appenders: ['thing'], level: 'ERROR' } }
|
||||
appenders: { thing: { type: "cheese" } },
|
||||
categories: { default: { appenders: ["thing"], level: "ERROR" } }
|
||||
});
|
||||
|
||||
t.ok(result.configureCalled);
|
||||
t.equal(result.type, 'cheese');
|
||||
t.equal(result.label, 'correct');
|
||||
t.equal(result.type, "cheese");
|
||||
t.equal(result.label, "correct");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should load appenders relative to process.cwd if not found in core, node_modules', (t) => {
|
||||
const result = {};
|
||||
const fakeProcess = new Proxy(process, {
|
||||
get(target, key) {
|
||||
if (key === 'cwd') {
|
||||
return () => '/var/lib/cheese';
|
||||
batch.test(
|
||||
"should load appenders relative to main file if not in core, or node_modules",
|
||||
t => {
|
||||
const result = {};
|
||||
const mainPath = path.dirname(require.main.filename);
|
||||
const sandboxConfig = {
|
||||
ignoreMissing: true,
|
||||
requires: {}
|
||||
};
|
||||
sandboxConfig.requires[`${mainPath}/cheese`] = testAppender(
|
||||
"correct",
|
||||
result
|
||||
);
|
||||
// add this one, because when we're running coverage the main path is a bit different
|
||||
sandboxConfig.requires[
|
||||
`${path.join(mainPath, "../../node_modules/nyc/bin/cheese")}`
|
||||
] = testAppender("correct", result);
|
||||
// in node v6, there's an extra layer of node modules for some reason, so add this one to work around it
|
||||
sandboxConfig.requires[
|
||||
`${path.join(
|
||||
mainPath,
|
||||
"../../node_modules/tap/node_modules/nyc/bin/cheese"
|
||||
)}`
|
||||
] = testAppender("correct", result);
|
||||
|
||||
const sandboxedLog4js = sandbox.require(
|
||||
"../../lib/log4js",
|
||||
sandboxConfig
|
||||
);
|
||||
|
||||
sandboxedLog4js.configure({
|
||||
appenders: { thing: { type: "cheese" } },
|
||||
categories: { default: { appenders: ["thing"], level: "ERROR" } }
|
||||
});
|
||||
|
||||
t.ok(result.configureCalled);
|
||||
t.equal(result.type, "cheese");
|
||||
t.equal(result.label, "correct");
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.test(
|
||||
"should load appenders relative to process.cwd if not found in core, node_modules",
|
||||
t => {
|
||||
const result = {};
|
||||
const fakeProcess = new Proxy(process, {
|
||||
get(target, key) {
|
||||
if (key === "cwd") {
|
||||
return () => "/var/lib/cheese";
|
||||
}
|
||||
|
||||
return target[key];
|
||||
}
|
||||
|
||||
return target[key];
|
||||
}
|
||||
});
|
||||
const sandboxedLog4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
});
|
||||
const sandboxedLog4js = sandbox.require("../../lib/log4js", {
|
||||
ignoreMissing: true,
|
||||
requires: {
|
||||
'/var/lib/cheese/cheese': testAppender('correct', result),
|
||||
"/var/lib/cheese/cheese": testAppender("correct", result)
|
||||
},
|
||||
globals: {
|
||||
process: fakeProcess
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
sandboxedLog4js.configure({
|
||||
appenders: { thing: { type: 'cheese' } },
|
||||
categories: { default: { appenders: ['thing'], level: 'ERROR' } }
|
||||
});
|
||||
sandboxedLog4js.configure({
|
||||
appenders: { thing: { type: "cheese" } },
|
||||
categories: { default: { appenders: ["thing"], level: "ERROR" } }
|
||||
});
|
||||
|
||||
t.ok(result.configureCalled);
|
||||
t.equal(result.type, 'cheese');
|
||||
t.equal(result.label, 'correct');
|
||||
t.end();
|
||||
});
|
||||
t.ok(result.configureCalled);
|
||||
t.equal(result.type, "cheese");
|
||||
t.equal(result.label, "correct");
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.test('should pass config, layout, findAppender to appenders', (t) => {
|
||||
batch.test("should pass config, layout, findAppender to appenders", t => {
|
||||
const result = {};
|
||||
const sandboxedLog4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
ignoreMissing: true,
|
||||
requires: {
|
||||
cheese: testAppender('cheesy', result),
|
||||
notCheese: testAppender('notCheesy', {})
|
||||
}
|
||||
const sandboxedLog4js = sandbox.require("../../lib/log4js", {
|
||||
ignoreMissing: true,
|
||||
requires: {
|
||||
cheese: testAppender("cheesy", result),
|
||||
notCheese: testAppender("notCheesy", {})
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
sandboxedLog4js.configure({
|
||||
appenders: { thing: { type: 'cheese', foo: 'bar' }, thing2: { type: 'notCheese' } },
|
||||
categories: { default: { appenders: ['thing'], level: 'ERROR' } }
|
||||
appenders: {
|
||||
thing: { type: "cheese", foo: "bar" },
|
||||
thing2: { type: "notCheese" }
|
||||
},
|
||||
categories: { default: { appenders: ["thing"], level: "ERROR" } }
|
||||
});
|
||||
|
||||
t.ok(result.configureCalled);
|
||||
t.equal(result.type, 'cheese');
|
||||
t.equal(result.config.foo, 'bar');
|
||||
t.type(result.layouts, 'object');
|
||||
t.type(result.layouts.basicLayout, 'function');
|
||||
t.type(result.findAppender, 'function');
|
||||
t.type(result.findAppender('thing2'), 'object');
|
||||
t.equal(result.type, "cheese");
|
||||
t.equal(result.config.foo, "bar");
|
||||
t.type(result.layouts, "object");
|
||||
t.type(result.layouts.basicLayout, "function");
|
||||
t.type(result.findAppender, "function");
|
||||
t.type(result.findAppender("thing2"), "object");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should not give error if level object is used instead of string', (t) => {
|
||||
t.doesNotThrow(() => log4js.configure({
|
||||
appenders: { thing: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['thing'], level: log4js.levels.ERROR } }
|
||||
}));
|
||||
t.end();
|
||||
});
|
||||
batch.test(
|
||||
"should not give error if level object is used instead of string",
|
||||
t => {
|
||||
t.doesNotThrow(() =>
|
||||
log4js.configure({
|
||||
appenders: { thing: { type: "stdout" } },
|
||||
categories: {
|
||||
default: { appenders: ["thing"], level: log4js.levels.ERROR }
|
||||
}
|
||||
})
|
||||
);
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.end();
|
||||
});
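
// Sketch of the "appender instance" case tested above: instead of a type name, the
// config can reference a module object directly. The require path is a placeholder
// for a module shaped like the testAppender helper used in these tests.
const log4js = require('log4js');
const myAppenderModule = require('./my-cheese-appender');

log4js.configure({
  appenders: { cheese: { type: myAppenderModule } },
  categories: { default: { appenders: ['cheese'], level: 'error' } }
});
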
@ -1,8 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const test = require('tap').test;
|
||||
const EE = require('events').EventEmitter;
|
||||
const levels = require('../../lib/levels');
|
||||
const { test } = require("tap");
|
||||
const EE = require("events").EventEmitter;
|
||||
const levels = require("../../lib/levels");
|
||||
|
||||
class MockLogger {
|
||||
constructor() {
|
||||
@ -32,8 +30,8 @@ function MockRequest(remoteAddr, method, originalUrl) {
|
||||
this.socket = { remoteAddress: remoteAddr };
|
||||
this.originalUrl = originalUrl;
|
||||
this.method = method;
|
||||
this.httpVersionMajor = '5';
|
||||
this.httpVersionMinor = '0';
|
||||
this.httpVersionMajor = "5";
|
||||
this.httpVersionMinor = "0";
|
||||
this.headers = {};
|
||||
}
|
||||
|
||||
@ -45,7 +43,7 @@ class MockResponse extends EE {
|
||||
}
|
||||
|
||||
end() {
|
||||
this.emit('finish');
|
||||
this.emit("finish");
|
||||
}
|
||||
|
||||
setHeader(key, value) {
|
||||
@ -61,23 +59,30 @@ class MockResponse extends EE {
|
||||
}
|
||||
}
|
||||
|
||||
test('log4js connect logger', (batch) => {
|
||||
const clm = require('../../lib/connect-logger');
|
||||
test("log4js connect logger", batch => {
|
||||
const clm = require("../../lib/connect-logger");
|
||||
|
||||
batch.test('with context config', (t) => {
|
||||
batch.test("with context config", t => {
|
||||
const ml = new MockLogger();
|
||||
const cl = clm(ml, { context: true });
|
||||
|
||||
t.beforeEach((done) => { ml.contexts = []; done(); });
|
||||
t.beforeEach(done => {
|
||||
ml.contexts = [];
|
||||
done();
|
||||
});
|
||||
|
||||
t.test('response should be included in context', (assert) => {
|
||||
const contexts = ml.contexts;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
t.test("response should be included in context", assert => {
|
||||
const { contexts } = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.png"
|
||||
); // not gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => { });
|
||||
res.end('chunk', 'encoding');
|
||||
cl(req, res, () => {});
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.type(contexts, 'Array');
|
||||
assert.type(contexts, "Array");
|
||||
assert.equal(contexts.length, 1);
|
||||
assert.type(contexts[0].res, MockResponse);
|
||||
assert.end();
|
||||
@ -86,20 +91,27 @@ test('log4js connect logger', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('without context config', (t) => {
|
||||
batch.test("without context config", t => {
|
||||
const ml = new MockLogger();
|
||||
const cl = clm(ml, { });
|
||||
const cl = clm(ml, {});
|
||||
|
||||
t.beforeEach((done) => { ml.contexts = []; done(); });
|
||||
t.beforeEach(done => {
|
||||
ml.contexts = [];
|
||||
done();
|
||||
});
|
||||
|
||||
t.test('response should not be included in context', (assert) => {
|
||||
const contexts = ml.contexts;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
t.test("response should not be included in context", assert => {
|
||||
const { contexts } = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.png"
|
||||
); // not gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => { });
|
||||
res.end('chunk', 'encoding');
|
||||
cl(req, res, () => {});
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.type(contexts, 'Array');
|
||||
assert.type(contexts, "Array");
|
||||
assert.equal(contexts.length, 1);
|
||||
assert.type(contexts[0].res, undefined);
|
||||
assert.end();
|
||||
|
||||
@ -1,19 +1,15 @@
|
||||
/* jshint maxparams:7 */
|
||||
|
||||
'use strict';
|
||||
|
||||
const test = require('tap').test;
|
||||
const EE = require('events').EventEmitter;
|
||||
const levels = require('../../lib/levels');
|
||||
const { test } = require("tap");
|
||||
const EE = require("events").EventEmitter;
|
||||
const levels = require("../../lib/levels");
|
||||
|
||||
class MockLogger {
|
||||
constructor() {
|
||||
this.level = levels.TRACE;
|
||||
this.messages = [];
|
||||
this.log = function (level, message) {
|
||||
this.messages.push({ level: level, message: message });
|
||||
this.log = function(level, message) {
|
||||
this.messages.push({ level, message });
|
||||
};
|
||||
this.isLevelEnabled = function (level) {
|
||||
this.isLevelEnabled = function(level) {
|
||||
return level.isGreaterThanOrEqualTo(this.level);
|
||||
};
|
||||
}
|
||||
@ -24,8 +20,8 @@ function MockRequest(remoteAddr, method, originalUrl, headers, url, custom) {
|
||||
this.originalUrl = originalUrl;
|
||||
this.url = url;
|
||||
this.method = method;
|
||||
this.httpVersionMajor = '5';
|
||||
this.httpVersionMinor = '0';
|
||||
this.httpVersionMajor = "5";
|
||||
this.httpVersionMinor = "0";
|
||||
this.headers = headers || {};
|
||||
|
||||
if (custom) {
|
||||
@ -35,7 +31,7 @@ function MockRequest(remoteAddr, method, originalUrl, headers, url, custom) {
|
||||
}
|
||||
|
||||
const self = this;
|
||||
Object.keys(this.headers).forEach((key) => {
|
||||
Object.keys(this.headers).forEach(key => {
|
||||
self.headers[key.toLowerCase()] = self.headers[key];
|
||||
});
|
||||
}
|
||||
@ -47,7 +43,7 @@ class MockResponse extends EE {
|
||||
}
|
||||
|
||||
end() {
|
||||
this.emit('finish');
|
||||
this.emit("finish");
|
||||
}
|
||||
|
||||
setHeader(key, value) {
|
||||
@ -63,137 +59,157 @@ class MockResponse extends EE {
|
||||
}
|
||||
}
|
||||
|
||||
function request(cl, method, originalUrl, code, reqHeaders, resHeaders, next, url, custom = undefined) {
|
||||
const req = new MockRequest('my.remote.addr', method, originalUrl, reqHeaders, url, custom);
|
||||
function request(
|
||||
cl,
|
||||
method,
|
||||
originalUrl,
|
||||
code,
|
||||
reqHeaders,
|
||||
resHeaders,
|
||||
next,
|
||||
url,
|
||||
custom = undefined
|
||||
) {
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
method,
|
||||
originalUrl,
|
||||
reqHeaders,
|
||||
url,
|
||||
custom
|
||||
);
|
||||
const res = new MockResponse();
|
||||
if (next) {
|
||||
next = next.bind(null, req, res, () => { });
|
||||
next = next.bind(null, req, res, () => {});
|
||||
} else {
|
||||
next = () => { };
|
||||
next = () => {};
|
||||
}
|
||||
cl(req, res, next);
|
||||
res.writeHead(code, resHeaders);
|
||||
res.end('chunk', 'encoding');
|
||||
res.end("chunk", "encoding");
|
||||
}
|
||||
|
||||
test('log4js connect logger', (batch) => {
|
||||
const clm = require('../../lib/connect-logger');
|
||||
batch.test('getConnectLoggerModule', (t) => {
|
||||
t.type(clm, 'function', 'should return a connect logger factory');
|
||||
test("log4js connect logger", batch => {
|
||||
const clm = require("../../lib/connect-logger");
|
||||
batch.test("getConnectLoggerModule", t => {
|
||||
t.type(clm, "function", "should return a connect logger factory");
|
||||
|
||||
t.test('should take a log4js logger and return a "connect logger"', (assert) => {
|
||||
t.test(
|
||||
'should take a log4js logger and return a "connect logger"',
|
||||
assert => {
|
||||
const ml = new MockLogger();
|
||||
const cl = clm(ml);
|
||||
|
||||
assert.type(cl, "function");
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
|
||||
t.test("log events", assert => {
|
||||
const ml = new MockLogger();
|
||||
const cl = clm(ml);
|
||||
request(cl, "GET", "http://url", 200);
|
||||
|
||||
assert.type(cl, 'function');
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('log events', (assert) => {
|
||||
const ml = new MockLogger();
|
||||
const cl = clm(ml);
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
|
||||
const messages = ml.messages;
|
||||
assert.type(messages, 'Array');
|
||||
const { messages } = ml;
|
||||
assert.type(messages, "Array");
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
assert.include(messages[0].message, "GET");
|
||||
assert.include(messages[0].message, "http://url");
|
||||
assert.include(messages[0].message, "my.remote.addr");
|
||||
assert.include(messages[0].message, "200");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('log events with level below logging level', (assert) => {
|
||||
t.test("log events with level below logging level", assert => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.FATAL;
|
||||
const cl = clm(ml);
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
request(cl, "GET", "http://url", 200);
|
||||
|
||||
assert.type(ml.messages, 'Array');
|
||||
assert.type(ml.messages, "Array");
|
||||
assert.equal(ml.messages.length, 0);
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('log events with non-default level and custom format', (assert) => {
|
||||
t.test("log events with non-default level and custom format", assert => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, { level: levels.WARN, format: ':method :url' });
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
const cl = clm(ml, { level: levels.WARN, format: ":method :url" });
|
||||
request(cl, "GET", "http://url", 200);
|
||||
|
||||
const messages = ml.messages;
|
||||
const { messages } = ml;
|
||||
assert.type(messages, Array);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.WARN.isEqualTo(messages[0].level));
|
||||
assert.equal(messages[0].message, 'GET http://url');
|
||||
assert.equal(messages[0].message, "GET http://url");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('adding multiple loggers should only log once', (assert) => {
|
||||
t.test("adding multiple loggers should only log once", assert => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, { level: levels.WARN, format: ':method :url' });
|
||||
const nextLogger = clm(ml, { level: levels.INFO, format: ':method' });
|
||||
request(cl, 'GET', 'http://url', 200, null, null, nextLogger);
|
||||
const cl = clm(ml, { level: levels.WARN, format: ":method :url" });
|
||||
const nextLogger = clm(ml, { level: levels.INFO, format: ":method" });
|
||||
request(cl, "GET", "http://url", 200, null, null, nextLogger);
|
||||
|
||||
const messages = ml.messages;
|
||||
const { messages } = ml;
|
||||
assert.type(messages, Array);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.WARN.isEqualTo(messages[0].level));
|
||||
assert.equal(messages[0].message, 'GET http://url');
|
||||
assert.equal(messages[0].message, "GET http://url");
|
||||
|
||||
assert.end();
|
||||
});
|
||||
t.end();
|
||||
});
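
// Sketch of the middleware these tests cover, wired into an Express app. Express
// itself is an assumption here; the tests only use mock req/res objects.
const express = require('express');
const log4js = require('log4js');

const app = express();
const httpLogger = log4js.getLogger('http');

// level: 'auto' picks INFO/WARN/ERROR from the response status code,
// as the "auto log levels" test below verifies.
app.use(log4js.connectLogger(httpLogger, { level: 'auto' }));

app.get('/', (req, res) => res.send('ok'));
app.listen(3000);
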
batch.test('logger with options as string', (t) => {
|
||||
batch.test("logger with options as string", t => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, ':method :url');
|
||||
request(cl, 'POST', 'http://meh', 200);
|
||||
const cl = clm(ml, ":method :url");
|
||||
request(cl, "POST", "http://meh", 200);
|
||||
|
||||
const messages = ml.messages;
|
||||
t.equal(messages[0].message, 'POST http://meh');
|
||||
const { messages } = ml;
|
||||
t.equal(messages[0].message, "POST http://meh");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('auto log levels', (t) => {
|
||||
batch.test("auto log levels", t => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, { level: 'auto', format: ':method :url' });
|
||||
request(cl, 'GET', 'http://meh', 200);
|
||||
request(cl, 'GET', 'http://meh', 201);
|
||||
request(cl, 'GET', 'http://meh', 302);
|
||||
request(cl, 'GET', 'http://meh', 404);
|
||||
request(cl, 'GET', 'http://meh', 500);
|
||||
const cl = clm(ml, { level: "auto", format: ":method :url" });
|
||||
request(cl, "GET", "http://meh", 200);
|
||||
request(cl, "GET", "http://meh", 201);
|
||||
request(cl, "GET", "http://meh", 302);
|
||||
request(cl, "GET", "http://meh", 404);
|
||||
request(cl, "GET", "http://meh", 500);
|
||||
|
||||
const messages = ml.messages;
|
||||
t.test('should use INFO for 2xx', (assert) => {
|
||||
const { messages } = ml;
|
||||
t.test("should use INFO for 2xx", assert => {
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.ok(levels.INFO.isEqualTo(messages[1].level));
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('should use WARN for 3xx', (assert) => {
|
||||
t.test("should use WARN for 3xx", assert => {
|
||||
assert.ok(levels.WARN.isEqualTo(messages[2].level));
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('should use ERROR for 4xx', (assert) => {
|
||||
t.test("should use ERROR for 4xx", assert => {
|
||||
assert.ok(levels.ERROR.isEqualTo(messages[3].level));
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('should use ERROR for 5xx', (assert) => {
|
||||
t.test("should use ERROR for 5xx", assert => {
|
||||
assert.ok(levels.ERROR.isEqualTo(messages[4].level));
|
||||
assert.end();
|
||||
});
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('logger with status code rules applied', (t) => {
|
||||
batch.test("logger with status code rules applied", t => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.DEBUG;
|
||||
const clr = [
|
||||
@ -201,178 +217,199 @@ test('log4js connect logger', (batch) => {
|
||||
{ from: 200, to: 299, level: levels.DEBUG.toString() },
|
||||
{ from: 300, to: 399, level: levels.INFO.toString() }
|
||||
];
|
||||
const cl = clm(ml, { level: 'auto', format: ':method :url', statusRules: clr });
|
||||
request(cl, 'GET', 'http://meh', 200);
|
||||
request(cl, 'GET', 'http://meh', 201);
|
||||
request(cl, 'GET', 'http://meh', 302);
|
||||
request(cl, 'GET', 'http://meh', 304);
|
||||
request(cl, 'GET', 'http://meh', 404);
|
||||
request(cl, 'GET', 'http://meh', 500);
|
||||
const cl = clm(ml, {
|
||||
level: "auto",
|
||||
format: ":method :url",
|
||||
statusRules: clr
|
||||
});
|
||||
request(cl, "GET", "http://meh", 200);
|
||||
request(cl, "GET", "http://meh", 201);
|
||||
request(cl, "GET", "http://meh", 302);
|
||||
request(cl, "GET", "http://meh", 304);
|
||||
request(cl, "GET", "http://meh", 404);
|
||||
request(cl, "GET", "http://meh", 500);
|
||||
|
||||
const messages = ml.messages;
|
||||
t.test('should use DEBUG for 2xx', (assert) => {
|
||||
const { messages } = ml;
|
||||
t.test("should use DEBUG for 2xx", assert => {
|
||||
assert.ok(levels.DEBUG.isEqualTo(messages[0].level));
|
||||
assert.ok(levels.DEBUG.isEqualTo(messages[1].level));
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('should use WARN for 3xx, DEBUG for 304', (assert) => {
|
||||
t.test("should use WARN for 3xx, DEBUG for 304", assert => {
|
||||
assert.ok(levels.INFO.isEqualTo(messages[2].level));
|
||||
assert.ok(levels.DEBUG.isEqualTo(messages[3].level));
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('should use ERROR for 4xx', (assert) => {
|
||||
t.test("should use ERROR for 4xx", assert => {
|
||||
assert.ok(levels.ERROR.isEqualTo(messages[4].level));
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('should use ERROR for 5xx', (assert) => {
|
||||
t.test("should use ERROR for 5xx", assert => {
|
||||
assert.ok(levels.ERROR.isEqualTo(messages[5].level));
|
||||
assert.end();
|
||||
});
|
||||
t.end();
|
||||
});
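
// Sketch of the statusRules option exercised above: rules re-map which level is
// used for a given status range, overriding the automatic choice. The level values
// mirror the form used in the test (level name strings).
const log4js = require('log4js');
const { levels } = log4js;
const logger = log4js.getLogger('http');

const middleware = log4js.connectLogger(logger, {
  level: 'auto',
  format: ':method :url',
  statusRules: [
    { from: 200, to: 299, level: levels.DEBUG.toString() },
    { from: 300, to: 399, level: levels.INFO.toString() }
  ]
});
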
batch.test('format using a function', (t) => {
|
||||
batch.test("format using a function", t => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, () => 'I was called');
|
||||
request(cl, 'GET', 'http://blah', 200);
|
||||
const cl = clm(ml, () => "I was called");
|
||||
request(cl, "GET", "http://blah", 200);
|
||||
|
||||
t.equal(ml.messages[0].message, 'I was called');
|
||||
t.equal(ml.messages[0].message, "I was called");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('format using a function that also uses tokens', (t) => {
|
||||
batch.test("format using a function that also uses tokens", t => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, (req, res, tokenReplacer) => `${req.method} ${tokenReplacer(':status')}`);
|
||||
request(cl, 'GET', 'http://blah', 200);
|
||||
|
||||
t.equal(ml.messages[0].message, 'GET 200');
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('format using a function, but do not log anything if the function returns nothing', (t) => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, () => null);
|
||||
request(cl, 'GET', 'http://blah', 200);
|
||||
|
||||
t.equal(ml.messages.length, 0);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('format that includes request headers', (t) => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, ':req[Content-Type]');
|
||||
request(
|
||||
cl,
|
||||
'GET', 'http://blah', 200,
|
||||
{ 'Content-Type': 'application/json' }
|
||||
const cl = clm(
|
||||
ml,
|
||||
(req, res, tokenReplacer) => `${req.method} ${tokenReplacer(":status")}`
|
||||
);
|
||||
request(cl, "GET", "http://blah", 200);
|
||||
|
||||
t.equal(ml.messages[0].message, 'application/json');
|
||||
t.equal(ml.messages[0].message, "GET 200");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('format that includes response headers', (t) => {
|
||||
batch.test(
|
||||
"format using a function, but do not log anything if the function returns nothing",
|
||||
t => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, () => null);
|
||||
request(cl, "GET", "http://blah", 200);
|
||||
|
||||
t.equal(ml.messages.length, 0);
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.test("format that includes request headers", t => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, ':res[Content-Type]');
|
||||
const cl = clm(ml, ":req[Content-Type]");
|
||||
request(cl, "GET", "http://blah", 200, {
|
||||
"Content-Type": "application/json"
|
||||
});
|
||||
|
||||
t.equal(ml.messages[0].message, "application/json");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test("format that includes response headers", t => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, ":res[Content-Type]");
|
||||
request(cl, "GET", "http://blah", 200, null, {
|
||||
"Content-Type": "application/cheese"
|
||||
});
|
||||
|
||||
t.equal(ml.messages[0].message, "application/cheese");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test("url token should check originalUrl and url", t => {
|
||||
const ml = new MockLogger();
|
||||
const cl = clm(ml, ":url");
|
||||
request(cl, "GET", null, 200, null, null, null, "http://cheese");
|
||||
|
||||
t.equal(ml.messages[0].message, "http://cheese");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test("log events with custom token", t => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, {
|
||||
level: levels.INFO,
|
||||
format: ":method :url :custom_string",
|
||||
tokens: [
|
||||
{
|
||||
token: ":custom_string",
|
||||
replacement: "fooBAR"
|
||||
}
|
||||
]
|
||||
});
|
||||
request(cl, "GET", "http://url", 200);
|
||||
|
||||
t.type(ml.messages, "Array");
|
||||
t.equal(ml.messages.length, 1);
|
||||
t.ok(levels.INFO.isEqualTo(ml.messages[0].level));
|
||||
t.equal(ml.messages[0].message, "GET http://url fooBAR");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test("log events with custom override token", t => {
|
||||
const ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, {
|
||||
level: levels.INFO,
|
||||
format: ":method :url :date",
|
||||
tokens: [
|
||||
{
|
||||
token: ":date",
|
||||
replacement: "20150310"
|
||||
}
|
||||
]
|
||||
});
|
||||
request(cl, "GET", "http://url", 200);
|
||||
|
||||
t.type(ml.messages, "Array");
|
||||
t.equal(ml.messages.length, 1);
|
||||
t.ok(levels.INFO.isEqualTo(ml.messages[0].level));
|
||||
t.equal(ml.messages[0].message, "GET http://url 20150310");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test("log events with custom format", t => {
|
||||
const ml = new MockLogger();
|
||||
const body = { say: "hi!" };
|
||||
ml.level = levels.INFO;
|
||||
const cl = clm(ml, {
|
||||
level: levels.INFO,
|
||||
format: (req, res, format) =>
|
||||
format(`:method :url ${JSON.stringify(req.body)}`)
|
||||
});
|
||||
request(
|
||||
cl,
|
||||
'GET', 'http://blah', 200,
|
||||
"POST",
|
||||
"http://url",
|
||||
200,
|
||||
{ "Content-Type": "application/json" },
|
||||
null,
|
||||
{ 'Content-Type': 'application/cheese' }
|
||||
null,
|
||||
null,
|
||||
{ body }
|
||||
);
|
||||
|
||||
t.equal(ml.messages[0].message, 'application/cheese');
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('url token should check originalUrl and url', (t) => {
const ml = new MockLogger();
const cl = clm(ml, ':url');
request(cl, 'GET', null, 200, null, null, null, 'http://cheese');

t.equal(ml.messages[0].message, 'http://cheese');
t.end();
});

batch.test('log events with custom token', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, {
level: levels.INFO,
format: ':method :url :custom_string',
tokens: [
{
token: ':custom_string', replacement: 'fooBAR'
}
]
});
request(cl, 'GET', 'http://url', 200);

t.type(ml.messages, 'Array');
t.equal(ml.messages.length, 1);
t.ok(levels.INFO.isEqualTo(ml.messages[0].level));
t.equal(ml.messages[0].message, 'GET http://url fooBAR');
t.end();
});

batch.test('log events with custom override token', (t) => {
const ml = new MockLogger();
ml.level = levels.INFO;
const cl = clm(ml, {
level: levels.INFO,
format: ':method :url :date',
tokens: [
{
token: ':date', replacement: '20150310'
}
]
});
request(cl, 'GET', 'http://url', 200);

t.type(ml.messages, 'Array');
t.equal(ml.messages.length, 1);
t.ok(levels.INFO.isEqualTo(ml.messages[0].level));
t.equal(ml.messages[0].message, 'GET http://url 20150310');
t.end();
});

batch.test('log events with custom format', (t) => {
const ml = new MockLogger();
const body = { say: 'hi!' };
ml.level = levels.INFO;
const cl = clm(ml, {
level: levels.INFO,
format: (req, res, format) => (format(`:method :url ${JSON.stringify(req.body)}`))
});
request(cl, 'POST', 'http://url', 200, { 'Content-Type': 'application/json' }, null, null, null, { body: body });

t.ok(levels.INFO.isEqualTo(ml.messages[0].level));
t.equal(ml.messages[0].message, `POST http://url ${JSON.stringify(body)}`);
t.end();
});
batch.test('handle weird old node versions where socket contains socket', (t) => {
const ml = new MockLogger();
const cl = clm(ml, ':remote-addr');
const req = new MockRequest(null, 'GET', 'http://blah');
req.socket = { socket: { remoteAddress: 'this is weird' } };
batch.test(
"handle weird old node versions where socket contains socket",
t => {
const ml = new MockLogger();
const cl = clm(ml, ":remote-addr");
const req = new MockRequest(null, "GET", "http://blah");
req.socket = { socket: { remoteAddress: "this is weird" } };

const res = new MockResponse();
cl(req, res, () => { });
res.writeHead(200, {});
res.end('chunk', 'encoding');
const res = new MockResponse();
cl(req, res, () => {});
res.writeHead(200, {});
res.end("chunk", "encoding");

t.equal(ml.messages[0].message, 'this is weird');
t.end();
});
t.equal(ml.messages[0].message, "this is weird");
t.end();
}
);

batch.end();
});
@ -1,19 +1,17 @@
'use strict';

const test = require('tap').test;
const EE = require('events').EventEmitter;
const levels = require('../../lib/levels');
const { test } = require("tap");
const EE = require("events").EventEmitter;
const levels = require("../../lib/levels");

class MockLogger {
constructor() {
this.messages = [];
this.level = levels.TRACE;

this.log = function (level, message) {
this.messages.push({ level: level, message: message });
this.log = function(level, message) {
this.messages.push({ level, message });
};

this.isLevelEnabled = function (level) {
this.isLevelEnabled = function(level) {
return level.isGreaterThanOrEqualTo(this.level);
};
}
@ -23,8 +21,8 @@ function MockRequest(remoteAddr, method, originalUrl) {
this.socket = { remoteAddress: remoteAddr };
this.originalUrl = originalUrl;
this.method = method;
this.httpVersionMajor = '5';
this.httpVersionMinor = '0';
this.httpVersionMajor = "5";
this.httpVersionMinor = "0";
this.headers = {};
}

@ -36,7 +34,7 @@ class MockResponse extends EE {
}

end() {
this.emit('finish');
this.emit("finish");
}

setHeader(key, value) {
@ -52,85 +50,111 @@ class MockResponse extends EE {
}
}
test('log4js connect logger', (batch) => {
|
||||
const clm = require('../../lib/connect-logger');
|
||||
test("log4js connect logger", batch => {
|
||||
const clm = require("../../lib/connect-logger");
|
||||
|
||||
batch.test('with nolog config', (t) => {
|
||||
batch.test("with nolog config", t => {
|
||||
const ml = new MockLogger();
|
||||
const cl = clm(ml, { nolog: '\\.gif' });
|
||||
const cl = clm(ml, { nolog: "\\.gif" });
|
||||
|
||||
t.beforeEach((done) => { ml.messages = []; done(); });
|
||||
|
||||
t.test('check unmatch url request', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => { });
|
||||
res.end('chunk', 'encoding');
|
||||
|
||||
assert.type(messages, 'Array');
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
assert.end();
|
||||
t.beforeEach(done => {
|
||||
ml.messages = [];
|
||||
done();
|
||||
});
|
||||
|
||||
t.test('check match url request', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
t.test("check unmatch url request", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.png"
|
||||
); // not gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => {});
|
||||
res.end('chunk', 'encoding');
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.type(messages, 'Array');
|
||||
assert.type(messages, "Array");
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, "GET");
|
||||
assert.include(messages[0].message, "http://url");
|
||||
assert.include(messages[0].message, "my.remote.addr");
|
||||
assert.include(messages[0].message, "200");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test("check match url request", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.gif"
|
||||
); // gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => {});
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.type(messages, "Array");
|
||||
assert.equal(messages.length, 0);
|
||||
assert.end();
|
||||
});
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('nolog Strings', (t) => {
|
||||
batch.test("nolog Strings", t => {
|
||||
const ml = new MockLogger();
|
||||
const cl = clm(ml, { nolog: '\\.gif|\\.jpe?g' });
|
||||
const cl = clm(ml, { nolog: "\\.gif|\\.jpe?g" });
|
||||
|
||||
t.beforeEach((done) => { ml.messages = []; done(); });
|
||||
t.beforeEach(done => {
|
||||
ml.messages = [];
|
||||
done();
|
||||
});
|
||||
|
||||
t.test('check unmatch url request (png)', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
t.test("check unmatch url request (png)", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.png"
|
||||
); // not gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => { });
|
||||
res.end('chunk', 'encoding');
|
||||
cl(req, res, () => {});
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
assert.include(messages[0].message, "GET");
|
||||
assert.include(messages[0].message, "http://url");
|
||||
assert.include(messages[0].message, "my.remote.addr");
|
||||
assert.include(messages[0].message, "200");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('check match url request (gif)', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif');
|
||||
t.test("check match url request (gif)", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.gif"
|
||||
);
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => { });
|
||||
res.end('chunk', 'encoding');
|
||||
cl(req, res, () => {});
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.equal(messages.length, 0);
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('check match url request (jpeg)', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg');
|
||||
t.test("check match url request (jpeg)", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.jpeg"
|
||||
);
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => { });
|
||||
res.end('chunk', 'encoding');
|
||||
cl(req, res, () => {});
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.equal(messages.length, 0);
|
||||
assert.end();
|
||||
@ -139,45 +163,60 @@ test('log4js connect logger', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('nolog Array<String>', (t) => {
|
||||
batch.test("nolog Array<String>", t => {
|
||||
const ml = new MockLogger();
|
||||
const cl = clm(ml, { nolog: ['\\.gif', '\\.jpe?g'] });
|
||||
const cl = clm(ml, { nolog: ["\\.gif", "\\.jpe?g"] });
|
||||
|
||||
t.beforeEach((done) => { ml.messages = []; done(); });
|
||||
t.beforeEach(done => {
|
||||
ml.messages = [];
|
||||
done();
|
||||
});
|
||||
|
||||
t.test('check unmatch url request (png)', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
t.test("check unmatch url request (png)", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.png"
|
||||
); // not gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => { });
|
||||
res.end('chunk', 'encoding');
|
||||
cl(req, res, () => {});
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
assert.include(messages[0].message, "GET");
|
||||
assert.include(messages[0].message, "http://url");
|
||||
assert.include(messages[0].message, "my.remote.addr");
|
||||
assert.include(messages[0].message, "200");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('check match url request (gif)', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
t.test("check match url request (gif)", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.gif"
|
||||
); // gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => { });
|
||||
res.end('chunk', 'encoding');
|
||||
cl(req, res, () => {});
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.equal(messages.length, 0);
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('check match url request (jpeg)', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif
|
||||
t.test("check match url request (jpeg)", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.jpeg"
|
||||
); // gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => { });
|
||||
res.end('chunk', 'encoding');
|
||||
cl(req, res, () => {});
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.equal(messages.length, 0);
|
||||
assert.end();
|
||||
@ -186,45 +225,60 @@ test('log4js connect logger', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('nolog RegExp', (t) => {
|
||||
batch.test("nolog RegExp", t => {
|
||||
const ml = new MockLogger();
|
||||
const cl = clm(ml, { nolog: /\.gif|\.jpe?g/ });
|
||||
|
||||
t.beforeEach((done) => { ml.messages = []; done(); });
|
||||
t.beforeEach(done => {
|
||||
ml.messages = [];
|
||||
done();
|
||||
});
|
||||
|
||||
t.test('check unmatch url request (png)', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
t.test("check unmatch url request (png)", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.png"
|
||||
); // not gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => { });
|
||||
res.end('chunk', 'encoding');
|
||||
cl(req, res, () => {});
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
assert.include(messages[0].message, "GET");
|
||||
assert.include(messages[0].message, "http://url");
|
||||
assert.include(messages[0].message, "my.remote.addr");
|
||||
assert.include(messages[0].message, "200");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('check match url request (gif)', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
t.test("check match url request (gif)", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.gif"
|
||||
); // gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => {});
|
||||
res.end('chunk', 'encoding');
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.equal(messages.length, 0);
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('check match url request (jpeg)', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif
|
||||
t.test("check match url request (jpeg)", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.jpeg"
|
||||
); // gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => {});
|
||||
res.end('chunk', 'encoding');
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.equal(messages.length, 0);
|
||||
assert.end();
|
||||
@ -233,45 +287,60 @@ test('log4js connect logger', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('nolog Array<RegExp>', (t) => {
|
||||
batch.test("nolog Array<RegExp>", t => {
|
||||
const ml = new MockLogger();
|
||||
const cl = clm(ml, { nolog: [/\.gif/, /\.jpe?g/] });
|
||||
|
||||
t.beforeEach((done) => { ml.messages = []; done(); });
|
||||
t.beforeEach(done => {
|
||||
ml.messages = [];
|
||||
done();
|
||||
});
|
||||
|
||||
t.test('check unmatch url request (png)', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
t.test("check unmatch url request (png)", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.png"
|
||||
); // not gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => { });
|
||||
res.end('chunk', 'encoding');
|
||||
cl(req, res, () => {});
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
assert.include(messages[0].message, "GET");
|
||||
assert.include(messages[0].message, "http://url");
|
||||
assert.include(messages[0].message, "my.remote.addr");
|
||||
assert.include(messages[0].message, "200");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('check match url request (gif)', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
t.test("check match url request (gif)", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.gif"
|
||||
); // gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => {});
|
||||
res.end('chunk', 'encoding');
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.equal(messages.length, 0);
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('check match url request (jpeg)', (assert) => {
|
||||
const messages = ml.messages;
|
||||
const req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif
|
||||
t.test("check match url request (jpeg)", assert => {
|
||||
const {messages} = ml;
|
||||
const req = new MockRequest(
|
||||
"my.remote.addr",
|
||||
"GET",
|
||||
"http://url/hoge.jpeg"
|
||||
); // gif
|
||||
const res = new MockResponse(200);
|
||||
cl(req, res, () => {});
|
||||
res.end('chunk', 'encoding');
|
||||
res.end("chunk", "encoding");
|
||||
|
||||
assert.equal(messages.length, 0);
|
||||
assert.end();
|
||||
|
||||
@ -1,64 +1,58 @@
|
||||
'use strict';
|
||||
const { test } = require("tap");
|
||||
const sandbox = require("@log4js-node/sandboxed-module");
|
||||
const consoleAppender = require("../../lib/appenders/console");
|
||||
|
||||
const test = require('tap').test;
|
||||
const sandbox = require('@log4js-node/sandboxed-module');
|
||||
const consoleAppender = require('../../lib/appenders/console');
|
||||
|
||||
test('log4js console appender', (batch) => {
|
||||
batch.test('should export a configure function', (t) => {
|
||||
t.type(consoleAppender.configure, 'function');
|
||||
test("log4js console appender", batch => {
|
||||
batch.test("should export a configure function", t => {
|
||||
t.type(consoleAppender.configure, "function");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should use default layout if none specified', (t) => {
|
||||
batch.test("should use default layout if none specified", t => {
|
||||
const messages = [];
|
||||
const fakeConsole = {
|
||||
log: function (msg) {
|
||||
log(msg) {
|
||||
messages.push(msg);
|
||||
}
|
||||
};
|
||||
const log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
const log4js = sandbox.require("../../lib/log4js", {
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
);
|
||||
});
|
||||
log4js.configure({
|
||||
appenders: { console: { type: 'console' } },
|
||||
categories: { default: { appenders: ['console'], level: 'DEBUG' } }
|
||||
appenders: { console: { type: "console" } },
|
||||
categories: { default: { appenders: ["console"], level: "DEBUG" } }
|
||||
});
|
||||
|
||||
log4js.getLogger().info('blah');
|
||||
log4js.getLogger().info("blah");
|
||||
|
||||
t.match(messages[0], /.*default.*blah/);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should output to console', (t) => {
|
||||
batch.test("should output to console", t => {
|
||||
const messages = [];
|
||||
const fakeConsole = {
|
||||
log: function (msg) {
|
||||
log(msg) {
|
||||
messages.push(msg);
|
||||
}
|
||||
};
|
||||
const log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
const log4js = sandbox.require("../../lib/log4js", {
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
);
|
||||
});
|
||||
log4js.configure({
|
||||
appenders: { console: { type: 'console', layout: { type: 'messagePassThrough' } } },
|
||||
categories: { default: { appenders: ['console'], level: 'DEBUG' } }
|
||||
appenders: {
|
||||
console: { type: "console", layout: { type: "messagePassThrough" } }
|
||||
},
|
||||
categories: { default: { appenders: ["console"], level: "DEBUG" } }
|
||||
});
|
||||
|
||||
log4js.getLogger().info('blah');
|
||||
log4js.getLogger().info("blah");
|
||||
|
||||
t.equal(messages[0], 'blah');
|
||||
t.equal(messages[0], "blah");
|
||||
t.end();
|
||||
});
|
||||
|
||||
|
||||
@ -1,12 +1,10 @@
|
||||
'use strict';
|
||||
|
||||
const test = require('tap').test;
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const EOL = require('os').EOL || '\n';
|
||||
const format = require('date-format');
|
||||
const sandbox = require('@log4js-node/sandboxed-module');
|
||||
const log4js = require('../../lib/log4js');
|
||||
const { test } = require("tap");
|
||||
const path = require("path");
|
||||
const fs = require("fs");
|
||||
const EOL = require("os").EOL || "\n";
|
||||
const format = require("date-format");
|
||||
const sandbox = require("@log4js-node/sandboxed-module");
|
||||
const log4js = require("../../lib/log4js");
|
||||
|
||||
function removeFile(filename) {
|
||||
try {
|
||||
@ -16,22 +14,24 @@ function removeFile(filename) {
|
||||
}
|
||||
}
|
||||
|
||||
test('../../lib/appenders/dateFile', (batch) => {
|
||||
batch.test('with default settings', (t) => {
|
||||
const testFile = path.join(__dirname, 'date-appender-default.log');
|
||||
test("../../lib/appenders/dateFile", batch => {
|
||||
batch.test("with default settings", t => {
|
||||
const testFile = path.join(__dirname, "date-appender-default.log");
|
||||
log4js.configure({
|
||||
appenders: { date: { type: 'dateFile', filename: testFile } },
|
||||
categories: { default: { appenders: ['date'], level: 'DEBUG' } }
|
||||
appenders: { date: { type: "dateFile", filename: testFile } },
|
||||
categories: { default: { appenders: ["date"], level: "DEBUG" } }
|
||||
});
|
||||
|
||||
const logger = log4js.getLogger('default-settings');
|
||||
const logger = log4js.getLogger("default-settings");
|
||||
|
||||
logger.info('This should be in the file.');
|
||||
t.teardown(() => { removeFile('date-appender-default.log'); });
|
||||
logger.info("This should be in the file.");
|
||||
t.teardown(() => {
|
||||
removeFile("date-appender-default.log");
|
||||
});
|
||||
|
||||
setTimeout(() => {
|
||||
fs.readFile(testFile, 'utf8', (err, contents) => {
|
||||
t.include(contents, 'This should be in the file');
|
||||
fs.readFile(testFile, "utf8", (err, contents) => {
|
||||
t.include(contents, "This should be in the file");
|
||||
t.match(
|
||||
contents,
|
||||
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
|
||||
@ -41,88 +41,111 @@ test('../../lib/appenders/dateFile', (batch) => {
|
||||
}, 100);
|
||||
});
|
||||
|
||||
batch.test('configure with dateFileAppender', (t) => {
|
||||
batch.test("configure with dateFileAppender", t => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
date: {
|
||||
type: 'dateFile',
|
||||
filename: 'test/tap/date-file-test.log',
|
||||
pattern: '-yyyy-MM-dd',
|
||||
layout: { type: 'messagePassThrough' }
|
||||
type: "dateFile",
|
||||
filename: "test/tap/date-file-test.log",
|
||||
pattern: "-yyyy-MM-dd",
|
||||
layout: { type: "messagePassThrough" }
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['date'], level: 'WARN' } }
|
||||
categories: { default: { appenders: ["date"], level: "WARN" } }
|
||||
});
|
||||
const logger = log4js.getLogger('tests');
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
const logger = log4js.getLogger("tests");
|
||||
logger.info("this should not be written to the file");
|
||||
logger.warn("this should be written to the file");
|
||||
|
||||
log4js.shutdown(() => {
|
||||
fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', (err, contents) => {
|
||||
t.include(contents, `this should be written to the file${EOL}`);
|
||||
t.equal(contents.indexOf('this should not be written to the file'), -1);
|
||||
t.end();
|
||||
});
|
||||
fs.readFile(
|
||||
path.join(__dirname, "date-file-test.log"),
|
||||
"utf8",
|
||||
(err, contents) => {
|
||||
t.include(contents, `this should be written to the file${EOL}`);
|
||||
t.equal(
|
||||
contents.indexOf("this should not be written to the file"),
|
||||
-1
|
||||
);
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
t.teardown(() => { removeFile('date-file-test.log'); });
|
||||
t.teardown(() => {
|
||||
removeFile("date-file-test.log");
|
||||
});
|
||||
});
|
||||
|
||||
batch.test('configure with options.alwaysIncludePattern', (t) => {
|
||||
batch.test("configure with options.alwaysIncludePattern", t => {
|
||||
const options = {
|
||||
appenders: {
|
||||
date: {
|
||||
category: 'tests',
|
||||
type: 'dateFile',
|
||||
filename: 'test/tap/date-file-test',
|
||||
pattern: 'yyyy-MM-dd.log',
|
||||
category: "tests",
|
||||
type: "dateFile",
|
||||
filename: "test/tap/date-file-test",
|
||||
pattern: "yyyy-MM-dd.log",
|
||||
alwaysIncludePattern: true,
|
||||
layout: {
|
||||
type: 'messagePassThrough'
|
||||
type: "messagePassThrough"
|
||||
}
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['date'], level: 'debug' } }
|
||||
categories: { default: { appenders: ["date"], level: "debug" } }
|
||||
};
|
||||
|
||||
const thisTime = format.asString(options.appenders.date.pattern, new Date());
|
||||
const existingFile = path.join(process.cwd(), 'test/tap/', `date-file-test.${thisTime}`);
|
||||
fs.writeFileSync(
|
||||
existingFile,
|
||||
`this is existing data${EOL}`,
|
||||
'utf8'
|
||||
const thisTime = format.asString(
|
||||
options.appenders.date.pattern,
|
||||
new Date()
|
||||
);
|
||||
const existingFile = path.join(
|
||||
process.cwd(),
|
||||
"test/tap/",
|
||||
`date-file-test.${thisTime}`
|
||||
);
|
||||
fs.writeFileSync(existingFile, `this is existing data${EOL}`, "utf8");
|
||||
log4js.configure(options);
|
||||
const logger = log4js.getLogger('tests');
|
||||
logger.warn('this should be written to the file with the appended date');
|
||||
const logger = log4js.getLogger("tests");
|
||||
logger.warn("this should be written to the file with the appended date");
|
||||
|
||||
t.teardown(() => { removeFile(existingFile); });
|
||||
t.teardown(() => {
|
||||
removeFile(existingFile);
|
||||
});
|
||||
|
||||
// wait for filesystem to catch up
|
||||
log4js.shutdown(() => {
|
||||
fs.readFile(existingFile, 'utf8', (err, contents) => {
|
||||
t.include(contents, 'this is existing data', 'should not overwrite the file on open (issue #132)');
|
||||
t.include(contents, 'this should be written to the file with the appended date');
|
||||
fs.readFile(existingFile, "utf8", (err, contents) => {
|
||||
t.include(
|
||||
contents,
|
||||
"this is existing data",
|
||||
"should not overwrite the file on open (issue #132)"
|
||||
);
|
||||
t.include(
|
||||
contents,
|
||||
"this should be written to the file with the appended date"
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
batch.test('should flush logs on shutdown', (t) => {
|
||||
const testFile = path.join(__dirname, 'date-appender-default.log');
|
||||
batch.test("should flush logs on shutdown", t => {
|
||||
const testFile = path.join(__dirname, "date-appender-default.log");
|
||||
log4js.configure({
|
||||
appenders: { test: { type: 'dateFile', filename: testFile } },
|
||||
categories: { default: { appenders: ['test'], level: 'trace' } }
|
||||
appenders: { test: { type: "dateFile", filename: testFile } },
|
||||
categories: { default: { appenders: ["test"], level: "trace" } }
|
||||
});
|
||||
const logger = log4js.getLogger('default-settings');
|
||||
const logger = log4js.getLogger("default-settings");
|
||||
|
||||
logger.info('1');
|
||||
logger.info('2');
|
||||
logger.info('3');
|
||||
t.teardown(() => { removeFile('date-appender-default.log'); });
|
||||
logger.info("1");
|
||||
logger.info("2");
|
||||
logger.info("3");
|
||||
t.teardown(() => {
|
||||
removeFile("date-appender-default.log");
|
||||
});
|
||||
|
||||
log4js.shutdown(() => {
|
||||
fs.readFile(testFile, 'utf8', (err, fileContents) => {
|
||||
fs.readFile(testFile, "utf8", (err, fileContents) => {
|
||||
// 3 lines of output, plus the trailing newline.
|
||||
t.equal(fileContents.split(EOL).length, 4);
|
||||
t.match(
|
||||
@ -134,7 +157,7 @@ test('../../lib/appenders/dateFile', (batch) => {
|
||||
});
|
||||
});
|
||||
|
||||
batch.test('should map maxLogSize to maxSize', (t) => {
|
||||
batch.test("should map maxLogSize to maxSize", t => {
|
||||
const fakeStreamroller = {};
|
||||
class DateRollingFileStream {
|
||||
constructor(filename, pattern, options) {
|
||||
@ -144,12 +167,20 @@ test('../../lib/appenders/dateFile', (batch) => {
|
||||
}
|
||||
}
|
||||
fakeStreamroller.DateRollingFileStream = DateRollingFileStream;
|
||||
const dateFileAppenderModule = sandbox.require('../../lib/appenders/dateFile', {
|
||||
requires: { streamroller: fakeStreamroller }
|
||||
});
|
||||
dateFileAppenderModule.configure({
|
||||
filename: 'cheese.log', pattern: 'yyyy', maxLogSize: 100
|
||||
}, { basicLayout: () => {} });
|
||||
const dateFileAppenderModule = sandbox.require(
|
||||
"../../lib/appenders/dateFile",
|
||||
{
|
||||
requires: { streamroller: fakeStreamroller }
|
||||
}
|
||||
);
|
||||
dateFileAppenderModule.configure(
|
||||
{
|
||||
filename: "cheese.log",
|
||||
pattern: "yyyy",
|
||||
maxLogSize: 100
|
||||
},
|
||||
{ basicLayout: () => {} }
|
||||
);
|
||||
|
||||
t.equal(fakeStreamroller.options.maxSize, 100);
|
||||
t.end();
|
||||
|
||||
@ -1,43 +1,46 @@
|
||||
'use strict';
|
||||
const { test } = require("tap");
|
||||
const sandbox = require("@log4js-node/sandboxed-module");
|
||||
|
||||
const test = require('tap').test;
|
||||
const sandbox = require('@log4js-node/sandboxed-module');
|
||||
|
||||
test('default settings', (t) => {
|
||||
test("default settings", t => {
|
||||
const output = [];
|
||||
|
||||
const log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/stdout': {
|
||||
name: 'stdout',
|
||||
appender: function () {
|
||||
return function (evt) {
|
||||
output.push(evt);
|
||||
};
|
||||
},
|
||||
configure: function () {
|
||||
return this.appender();
|
||||
}
|
||||
const log4js = sandbox.require("../../lib/log4js", {
|
||||
requires: {
|
||||
"./appenders/stdout": {
|
||||
name: "stdout",
|
||||
appender() {
|
||||
return function(evt) {
|
||||
output.push(evt);
|
||||
};
|
||||
},
|
||||
configure() {
|
||||
return this.appender();
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
const logger = log4js.getLogger('default-settings');
|
||||
logger.info('This should not be logged yet.');
|
||||
const logger = log4js.getLogger("default-settings");
|
||||
logger.info("This should not be logged yet.");
|
||||
|
||||
t.plan(3);
|
||||
t.equal(output.length, 0, 'Nothing should be logged until configure is called.');
|
||||
t.equal(
|
||||
output.length,
|
||||
0,
|
||||
"Nothing should be logged until configure is called."
|
||||
);
|
||||
|
||||
log4js.configure({
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'debug' } }
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "debug" } }
|
||||
});
|
||||
logger.info('This should go to stdout.');
|
||||
logger.info("This should go to stdout.");
|
||||
|
||||
t.equal(output.length, 1, 'It should log to stdout.');
|
||||
t.equal(output[0].data[0], 'This should go to stdout.', 'It should log the message.');
|
||||
t.equal(output.length, 1, "It should log to stdout.");
|
||||
t.equal(
|
||||
output[0].data[0],
|
||||
"This should go to stdout.",
|
||||
"It should log the message."
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
@ -1,36 +1,34 @@
|
||||
'use strict';
|
||||
|
||||
const test = require('tap').test;
|
||||
const cluster = require('cluster');
|
||||
const log4js = require('../../lib/log4js');
|
||||
const recorder = require('../../lib/appenders/recording');
|
||||
const { test } = require("tap");
|
||||
const cluster = require("cluster");
|
||||
const log4js = require("../../lib/log4js");
|
||||
const recorder = require("../../lib/appenders/recording");
|
||||
|
||||
cluster.removeAllListeners();
|
||||
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
vcr: { type: 'recording' }
|
||||
vcr: { type: "recording" }
|
||||
},
|
||||
categories: { default: { appenders: ['vcr'], level: 'debug' } },
|
||||
categories: { default: { appenders: ["vcr"], level: "debug" } },
|
||||
disableClustering: true
|
||||
});
|
||||
|
||||
if (cluster.isMaster) {
|
||||
cluster.fork();
|
||||
|
||||
const masterLogger = log4js.getLogger('master');
|
||||
const masterLogger = log4js.getLogger("master");
|
||||
const masterPid = process.pid;
|
||||
masterLogger.info('this is master');
|
||||
masterLogger.info("this is master");
|
||||
|
||||
cluster.on('exit', () => {
|
||||
cluster.on("exit", () => {
|
||||
const logEvents = recorder.replay();
|
||||
|
||||
test('cluster master', (batch) => {
|
||||
batch.test('only master events should be logged', (t) => {
|
||||
test("cluster master", batch => {
|
||||
batch.test("only master events should be logged", t => {
|
||||
t.equal(logEvents.length, 1);
|
||||
t.equal(logEvents[0].categoryName, 'master');
|
||||
t.equal(logEvents[0].categoryName, "master");
|
||||
t.equal(logEvents[0].pid, masterPid);
|
||||
t.equal(logEvents[0].data[0], 'this is master');
|
||||
t.equal(logEvents[0].data[0], "this is master");
|
||||
t.end();
|
||||
});
|
||||
|
||||
@ -38,22 +36,22 @@ if (cluster.isMaster) {
|
||||
});
|
||||
});
|
||||
} else {
|
||||
const workerLogger = log4js.getLogger('worker');
|
||||
workerLogger.info('this is worker', new Error('oh dear'));
|
||||
const workerLogger = log4js.getLogger("worker");
|
||||
workerLogger.info("this is worker", new Error("oh dear"));
|
||||
|
||||
const workerEvents = recorder.replay();
|
||||
test('cluster worker', (batch) => {
|
||||
batch.test('should send events to its own appender', (t) => {
|
||||
test("cluster worker", batch => {
|
||||
batch.test("should send events to its own appender", t => {
|
||||
t.equal(workerEvents.length, 1);
|
||||
t.equal(workerEvents[0].categoryName, 'worker');
|
||||
t.equal(workerEvents[0].data[0], 'this is worker');
|
||||
t.type(workerEvents[0].data[1], 'Error');
|
||||
t.contains(workerEvents[0].data[1].stack, 'Error: oh dear');
|
||||
t.equal(workerEvents[0].categoryName, "worker");
|
||||
t.equal(workerEvents[0].data[0], "this is worker");
|
||||
t.type(workerEvents[0].data[1], "Error");
|
||||
t.contains(workerEvents[0].data[1].stack, "Error: oh dear");
|
||||
t.end();
|
||||
});
|
||||
batch.end();
|
||||
});
|
||||
// test sending a cluster-style log message
|
||||
process.send({ topic: 'log4js:message', data: { cheese: 'gouda' } });
|
||||
process.send({ topic: "log4js:message", data: { cheese: "gouda" } });
|
||||
cluster.worker.disconnect();
|
||||
}
|
||||
|
||||
@ -1,15 +1,12 @@
|
||||
'use strict';
|
||||
const { test } = require("tap");
|
||||
const sandbox = require("@log4js-node/sandboxed-module");
|
||||
|
||||
const test = require('tap').test;
|
||||
const sandbox = require('@log4js-node/sandboxed-module');
|
||||
|
||||
test('file appender SIGHUP', (t) => {
|
||||
test("file appender SIGHUP", t => {
|
||||
let closeCalled = 0;
|
||||
let openCalled = 0;
|
||||
|
||||
const appender = sandbox.require(
|
||||
'../../lib/appenders/file',
|
||||
{
|
||||
const appender = sandbox
|
||||
.require("../../lib/appenders/file", {
|
||||
requires: {
|
||||
streamroller: {
|
||||
RollingFileStream: class RollingFileStream {
|
||||
@ -19,7 +16,7 @@ test('file appender SIGHUP', (t) => {
|
||||
}
|
||||
|
||||
on() {
|
||||
this.dummy = 'easier than turning off lint rule';
|
||||
this.dummy = "easier than turning off lint rule";
|
||||
}
|
||||
|
||||
end(cb) {
|
||||
@ -30,39 +27,46 @@ test('file appender SIGHUP', (t) => {
|
||||
|
||||
write() {
|
||||
if (this.ended) {
|
||||
throw new Error('write after end');
|
||||
throw new Error("write after end");
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
).configure({ type: 'file', filename: 'sighup-test-file' }, { basicLayout: function () { return 'whatever'; } });
|
||||
})
|
||||
.configure(
|
||||
{ type: "file", filename: "sighup-test-file" },
|
||||
{
|
||||
basicLayout() {
|
||||
return "whatever";
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
appender('something to log');
|
||||
process.kill(process.pid, 'SIGHUP');
|
||||
appender("something to log");
|
||||
process.kill(process.pid, "SIGHUP");
|
||||
|
||||
t.plan(2);
|
||||
setTimeout(() => {
|
||||
appender('something to log after sighup');
|
||||
t.equal(openCalled, 2, 'open should be called twice');
|
||||
t.equal(closeCalled, 1, 'close should be called once');
|
||||
appender("something to log after sighup");
|
||||
t.equal(openCalled, 2, "open should be called twice");
|
||||
t.equal(closeCalled, 1, "close should be called once");
|
||||
t.end();
|
||||
}, 100);
|
||||
});
|
||||
|
||||
test('file appender SIGHUP handler leak', (t) => {
|
||||
const log4js = require('../../lib/log4js');
|
||||
const initialListeners = process.listenerCount('SIGHUP');
|
||||
test("file appender SIGHUP handler leak", t => {
|
||||
const log4js = require("../../lib/log4js");
|
||||
const initialListeners = process.listenerCount("SIGHUP");
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
file: { type: 'file', filename: 'test.log' }
|
||||
file: { type: "file", filename: "test.log" }
|
||||
},
|
||||
categories: { default: { appenders: ['file'], level: 'info' } }
|
||||
categories: { default: { appenders: ["file"], level: "info" } }
|
||||
});
|
||||
log4js.shutdown(() => {
|
||||
t.equal(process.listenerCount('SIGHUP'), initialListeners);
|
||||
t.equal(process.listenerCount("SIGHUP"), initialListeners);
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
|
||||
@ -1,12 +1,10 @@
|
||||
'use strict';
|
||||
|
||||
const test = require('tap').test;
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const sandbox = require('@log4js-node/sandboxed-module');
|
||||
const zlib = require('zlib');
|
||||
const EOL = require('os').EOL || '\n';
|
||||
const log4js = require('../../lib/log4js');
|
||||
const { test } = require("tap");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const sandbox = require("@log4js-node/sandboxed-module");
|
||||
const zlib = require("zlib");
|
||||
const EOL = require("os").EOL || "\n";
|
||||
const log4js = require("../../lib/log4js");
|
||||
|
||||
function removeFile(filename) {
|
||||
try {
|
||||
@ -16,23 +14,25 @@ function removeFile(filename) {
|
||||
}
|
||||
}
|
||||
|
||||
test('log4js fileAppender', (batch) => {
|
||||
batch.test('with default fileAppender settings', (t) => {
|
||||
const testFile = path.join(__dirname, 'fa-default-test.log');
|
||||
const logger = log4js.getLogger('default-settings');
|
||||
test("log4js fileAppender", batch => {
|
||||
batch.test("with default fileAppender settings", t => {
|
||||
const testFile = path.join(__dirname, "fa-default-test.log");
|
||||
const logger = log4js.getLogger("default-settings");
|
||||
removeFile(testFile);
|
||||
|
||||
t.tearDown(() => { removeFile(testFile); });
|
||||
|
||||
log4js.configure({
|
||||
appenders: { file: { type: 'file', filename: testFile } },
|
||||
categories: { default: { appenders: ['file'], level: 'debug' } }
|
||||
t.tearDown(() => {
|
||||
removeFile(testFile);
|
||||
});
|
||||
|
||||
logger.info('This should be in the file.');
|
||||
log4js.configure({
|
||||
appenders: { file: { type: "file", filename: testFile } },
|
||||
categories: { default: { appenders: ["file"], level: "debug" } }
|
||||
});
|
||||
|
||||
logger.info("This should be in the file.");
|
||||
|
||||
setTimeout(() => {
|
||||
fs.readFile(testFile, 'utf8', (err, fileContents) => {
|
||||
fs.readFile(testFile, "utf8", (err, fileContents) => {
|
||||
t.include(fileContents, `This should be in the file.${EOL}`);
|
||||
t.match(
|
||||
fileContents,
|
||||
@ -43,22 +43,22 @@ test('log4js fileAppender', (batch) => {
|
||||
}, 100);
|
||||
});
|
||||
|
||||
batch.test('should flush logs on shutdown', (t) => {
|
||||
const testFile = path.join(__dirname, 'fa-default-test.log');
|
||||
batch.test("should flush logs on shutdown", t => {
|
||||
const testFile = path.join(__dirname, "fa-default-test.log");
|
||||
removeFile(testFile);
|
||||
|
||||
log4js.configure({
|
||||
appenders: { test: { type: 'file', filename: testFile } },
|
||||
categories: { default: { appenders: ['test'], level: 'trace' } }
|
||||
appenders: { test: { type: "file", filename: testFile } },
|
||||
categories: { default: { appenders: ["test"], level: "trace" } }
|
||||
});
|
||||
const logger = log4js.getLogger('default-settings');
|
||||
const logger = log4js.getLogger("default-settings");
|
||||
|
||||
logger.info('1');
|
||||
logger.info('2');
|
||||
logger.info('3');
|
||||
logger.info("1");
|
||||
logger.info("2");
|
||||
logger.info("3");
|
||||
|
||||
log4js.shutdown(() => {
|
||||
fs.readFile(testFile, 'utf8', (err, fileContents) => {
|
||||
fs.readFile(testFile, "utf8", (err, fileContents) => {
|
||||
// 3 lines of output, plus the trailing newline.
|
||||
t.equal(fileContents.split(EOL).length, 4);
|
||||
t.match(
|
||||
@ -70,9 +70,9 @@ test('log4js fileAppender', (batch) => {
|
||||
});
|
||||
});
|
||||
|
||||
batch.test('with a max file size and no backups', (t) => {
|
||||
const testFile = path.join(__dirname, 'fa-maxFileSize-test.log');
|
||||
const logger = log4js.getLogger('max-file-size');
|
||||
batch.test("with a max file size and no backups", t => {
|
||||
const testFile = path.join(__dirname, "fa-maxFileSize-test.log");
|
||||
const logger = log4js.getLogger("max-file-size");
|
||||
|
||||
t.tearDown(() => {
|
||||
removeFile(testFile);
|
||||
@ -85,34 +85,39 @@ test('log4js fileAppender', (batch) => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
file: {
|
||||
type: 'file', filename: testFile, maxLogSize: 100, backups: 0
|
||||
type: "file",
|
||||
filename: testFile,
|
||||
maxLogSize: 100,
|
||||
backups: 0
|
||||
}
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['file'], level: 'debug' }
|
||||
default: { appenders: ["file"], level: "debug" }
|
||||
}
|
||||
});
|
||||
|
||||
logger.info('This is the first log message.');
|
||||
logger.info('This is an intermediate log message.');
|
||||
logger.info('This is the second log message.');
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is an intermediate log message.");
|
||||
logger.info("This is the second log message.");
|
||||
// wait for the file system to catch up
|
||||
setTimeout(() => {
|
||||
fs.readFile(testFile, 'utf8', (err, fileContents) => {
|
||||
t.include(fileContents, 'This is the second log message.');
|
||||
t.equal(fileContents.indexOf('This is the first log message.'), -1);
|
||||
fs.readFile(testFile, "utf8", (err, fileContents) => {
|
||||
t.include(fileContents, "This is the second log message.");
|
||||
t.equal(fileContents.indexOf("This is the first log message."), -1);
|
||||
fs.readdir(__dirname, (e, files) => {
|
||||
const logFiles = files.filter(file => file.includes('fa-maxFileSize-test.log'));
|
||||
t.equal(logFiles.length, 2, 'should be 2 files');
|
||||
const logFiles = files.filter(file =>
|
||||
file.includes("fa-maxFileSize-test.log")
|
||||
);
|
||||
t.equal(logFiles.length, 2, "should be 2 files");
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
}, 100);
|
||||
});
|
||||
|
||||
batch.test('with a max file size in unit mode and no backups', (t) => {
|
||||
const testFile = path.join(__dirname, 'fa-maxFileSize-unit-test.log');
|
||||
const logger = log4js.getLogger('max-file-size-unit');
|
||||
batch.test("with a max file size in unit mode and no backups", t => {
|
||||
const testFile = path.join(__dirname, "fa-maxFileSize-unit-test.log");
|
||||
const logger = log4js.getLogger("max-file-size-unit");
|
||||
|
||||
t.tearDown(() => {
|
||||
removeFile(testFile);
|
||||
@ -125,37 +130,45 @@ test('log4js fileAppender', (batch) => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
file: {
|
||||
type: 'file', filename: testFile, maxLogSize: '1K', backups: 0
|
||||
type: "file",
|
||||
filename: testFile,
|
||||
maxLogSize: "1K",
|
||||
backups: 0
|
||||
}
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['file'], level: 'debug' }
|
||||
default: { appenders: ["file"], level: "debug" }
|
||||
}
|
||||
});
|
||||
const maxLine = 13;
|
||||
for (let i = 0; i < maxLine; i++) {
|
||||
logger.info('This is the first log message.');
|
||||
logger.info("This is the first log message.");
|
||||
}
|
||||
|
||||
logger.info('This is the second log message.');
|
||||
logger.info("This is the second log message.");
|
||||
|
||||
// wait for the file system to catch up
|
||||
setTimeout(() => {
|
||||
fs.readFile(testFile, 'utf8', (err, fileContents) => {
|
||||
t.include(fileContents, 'This is the second log message.');
|
||||
t.equal(fileContents.indexOf('This is the first log message.'), -1);
|
||||
fs.readFile(testFile, "utf8", (err, fileContents) => {
|
||||
t.include(fileContents, "This is the second log message.");
|
||||
t.equal(fileContents.indexOf("This is the first log message."), -1);
|
||||
fs.readdir(__dirname, (e, files) => {
|
||||
const logFiles = files.filter(file => file.includes('fa-maxFileSize-unit-test.log'));
|
||||
t.equal(logFiles.length, 2, 'should be 2 files');
|
||||
const logFiles = files.filter(file =>
|
||||
file.includes("fa-maxFileSize-unit-test.log")
|
||||
);
|
||||
t.equal(logFiles.length, 2, "should be 2 files");
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
}, 100);
|
||||
});
|
||||
|
||||
batch.test('with a max file size and 2 backups', (t) => {
|
||||
const testFile = path.join(__dirname, 'fa-maxFileSize-with-backups-test.log');
|
||||
const logger = log4js.getLogger('max-file-size-backups');
|
||||
batch.test("with a max file size and 2 backups", t => {
|
||||
const testFile = path.join(
|
||||
__dirname,
|
||||
"fa-maxFileSize-with-backups-test.log"
|
||||
);
|
||||
const logger = log4js.getLogger("max-file-size-backups");
|
||||
removeFile(testFile);
|
||||
removeFile(`${testFile}.1`);
|
||||
removeFile(`${testFile}.2`);
|
||||
@ -170,52 +183,74 @@ test('log4js fileAppender', (batch) => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
file: {
|
||||
type: 'file', filename: testFile, maxLogSize: 50, backups: 2
|
||||
type: "file",
|
||||
filename: testFile,
|
||||
maxLogSize: 50,
|
||||
backups: 2
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['file'], level: 'debug' } }
|
||||
categories: { default: { appenders: ["file"], level: "debug" } }
|
||||
});
|
||||
|
||||
logger.info('This is the first log message.');
|
||||
logger.info('This is the second log message.');
|
||||
logger.info('This is the third log message.');
|
||||
logger.info('This is the fourth log message.');
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is the second log message.");
|
||||
logger.info("This is the third log message.");
|
||||
logger.info("This is the fourth log message.");
|
||||
// give the system a chance to open the stream
|
||||
setTimeout(() => {
|
||||
fs.readdir(__dirname, (err, files) => {
|
||||
const logFiles = files.sort().filter(file => file.includes('fa-maxFileSize-with-backups-test.log'));
|
||||
const logFiles = files
|
||||
.sort()
|
||||
.filter(file =>
|
||||
file.includes("fa-maxFileSize-with-backups-test.log")
|
||||
);
|
||||
t.equal(logFiles.length, 3);
|
||||
t.same(logFiles, [
|
||||
'fa-maxFileSize-with-backups-test.log',
|
||||
'fa-maxFileSize-with-backups-test.log.1',
|
||||
'fa-maxFileSize-with-backups-test.log.2'
|
||||
"fa-maxFileSize-with-backups-test.log",
|
||||
"fa-maxFileSize-with-backups-test.log.1",
|
||||
"fa-maxFileSize-with-backups-test.log.2"
|
||||
]);
|
||||
t.test('the contents of the first file', (assert) => {
|
||||
fs.readFile(path.join(__dirname, logFiles[0]), 'utf8', (e, contents) => {
|
||||
assert.include(contents, 'This is the fourth log message.');
|
||||
assert.end();
|
||||
});
|
||||
t.test("the contents of the first file", assert => {
|
||||
fs.readFile(
|
||||
path.join(__dirname, logFiles[0]),
|
||||
"utf8",
|
||||
(e, contents) => {
|
||||
assert.include(contents, "This is the fourth log message.");
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
});
|
||||
t.test('the contents of the second file', (assert) => {
|
||||
fs.readFile(path.join(__dirname, logFiles[1]), 'utf8', (e, contents) => {
|
||||
assert.include(contents, 'This is the third log message.');
|
||||
assert.end();
|
||||
});
|
||||
t.test("the contents of the second file", assert => {
|
||||
fs.readFile(
|
||||
path.join(__dirname, logFiles[1]),
|
||||
"utf8",
|
||||
(e, contents) => {
|
||||
assert.include(contents, "This is the third log message.");
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
});
|
||||
t.test('the contents of the third file', (assert) => {
|
||||
fs.readFile(path.join(__dirname, logFiles[2]), 'utf8', (e, contents) => {
|
||||
assert.include(contents, 'This is the second log message.');
|
||||
assert.end();
|
||||
});
|
||||
t.test("the contents of the third file", assert => {
|
||||
fs.readFile(
|
||||
path.join(__dirname, logFiles[2]),
|
||||
"utf8",
|
||||
(e, contents) => {
|
||||
assert.include(contents, "This is the second log message.");
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
});
|
||||
t.end();
|
||||
});
|
||||
}, 200);
|
||||
});
|
||||
|
||||
batch.test('with a max file size and 2 compressed backups', (t) => {
|
||||
const testFile = path.join(__dirname, 'fa-maxFileSize-with-backups-compressed-test.log');
|
||||
const logger = log4js.getLogger('max-file-size-backups');
|
||||
batch.test("with a max file size and 2 compressed backups", t => {
|
||||
const testFile = path.join(
|
||||
__dirname,
|
||||
"fa-maxFileSize-with-backups-compressed-test.log"
|
||||
);
|
||||
const logger = log4js.getLogger("max-file-size-backups");
|
||||
removeFile(testFile);
|
||||
removeFile(`${testFile}.1.gz`);
|
||||
removeFile(`${testFile}.2.gz`);
|
||||
@ -230,89 +265,122 @@ test('log4js fileAppender', (batch) => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
file: {
|
||||
type: 'file', filename: testFile, maxLogSize: 50, backups: 2, compress: true
|
||||
type: "file",
|
||||
filename: testFile,
|
||||
maxLogSize: 50,
|
||||
backups: 2,
|
||||
compress: true
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['file'], level: 'debug' } }
|
||||
categories: { default: { appenders: ["file"], level: "debug" } }
|
||||
});
|
||||
logger.info('This is the first log message.');
|
||||
logger.info('This is the second log message.');
|
||||
logger.info('This is the third log message.');
|
||||
logger.info('This is the fourth log message.');
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is the second log message.");
|
||||
logger.info("This is the third log message.");
|
||||
logger.info("This is the fourth log message.");
|
||||
// give the system a chance to open the stream
|
||||
setTimeout(() => {
|
||||
fs.readdir(__dirname, (err, files) => {
|
||||
const logFiles = files.sort().filter(file => file.includes('fa-maxFileSize-with-backups-compressed-test.log'));
|
||||
t.equal(logFiles.length, 3, 'should be 3 files');
|
||||
const logFiles = files
|
||||
.sort()
|
||||
.filter(file =>
|
||||
file.includes("fa-maxFileSize-with-backups-compressed-test.log")
|
||||
);
|
||||
t.equal(logFiles.length, 3, "should be 3 files");
|
||||
t.same(logFiles, [
|
||||
'fa-maxFileSize-with-backups-compressed-test.log',
|
||||
'fa-maxFileSize-with-backups-compressed-test.log.1.gz',
|
||||
'fa-maxFileSize-with-backups-compressed-test.log.2.gz'
|
||||
"fa-maxFileSize-with-backups-compressed-test.log",
|
||||
"fa-maxFileSize-with-backups-compressed-test.log.1.gz",
|
||||
"fa-maxFileSize-with-backups-compressed-test.log.2.gz"
|
||||
]);
|
||||
t.test('the contents of the first file', (assert) => {
|
||||
fs.readFile(path.join(__dirname, logFiles[0]), 'utf8', (e, contents) => {
|
||||
assert.include(contents, 'This is the fourth log message.');
|
||||
assert.end();
|
||||
});
|
||||
t.test("the contents of the first file", assert => {
|
||||
fs.readFile(
|
||||
path.join(__dirname, logFiles[0]),
|
||||
"utf8",
|
||||
(e, contents) => {
|
||||
assert.include(contents, "This is the fourth log message.");
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
});
|
||||
t.test('the contents of the second file', (assert) => {
|
||||
zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[1])), (e, contents) => {
|
||||
assert.include(contents.toString('utf8'), 'This is the third log message.');
|
||||
assert.end();
|
||||
});
|
||||
t.test("the contents of the second file", assert => {
|
||||
zlib.gunzip(
|
||||
fs.readFileSync(path.join(__dirname, logFiles[1])),
|
||||
(e, contents) => {
|
||||
assert.include(
|
||||
contents.toString("utf8"),
|
||||
"This is the third log message."
|
||||
);
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
});
|
||||
t.test('the contents of the third file', (assert) => {
|
||||
zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[2])), (e, contents) => {
|
||||
assert.include(contents.toString('utf8'), 'This is the second log message.');
|
||||
assert.end();
|
||||
});
|
||||
t.test("the contents of the third file", assert => {
|
||||
zlib.gunzip(
|
||||
fs.readFileSync(path.join(__dirname, logFiles[2])),
|
||||
(e, contents) => {
|
||||
assert.include(
|
||||
contents.toString("utf8"),
|
||||
"This is the second log message."
|
||||
);
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
});
|
||||
t.end();
|
||||
});
|
||||
}, 1000);
|
||||
});
|
||||
|
||||
batch.test('when underlying stream errors', (t) => {
|
||||
batch.test("when underlying stream errors", t => {
|
||||
let consoleArgs;
|
||||
let errorHandler;
|
||||
|
||||
const fileAppender = sandbox.require(
|
||||
'../../lib/appenders/file',
|
||||
{
|
||||
globals: {
|
||||
console: {
|
||||
error: function () {
|
||||
consoleArgs = Array.prototype.slice.call(arguments);
|
||||
}
|
||||
}
|
||||
},
|
||||
requires: {
|
||||
streamroller: {
|
||||
RollingFileStream: function () {
|
||||
this.end = function () {
|
||||
};
|
||||
this.on = function (evt, cb) {
|
||||
if (evt === 'error') {
|
||||
errorHandler = cb;
|
||||
}
|
||||
};
|
||||
this.write = function () {
|
||||
return true;
|
||||
};
|
||||
}
|
||||
}
|
||||
const RollingFileStream = class {
|
||||
end() {
|
||||
this.ended = true;
|
||||
}
|
||||
|
||||
on(evt, cb) {
|
||||
if (evt === "error") {
|
||||
this.errored = true;
|
||||
errorHandler = cb;
|
||||
}
|
||||
}
|
||||
|
||||
write() {
|
||||
this.written = true;
|
||||
return true;
|
||||
}
|
||||
};
|
||||
const fileAppender = sandbox.require("../../lib/appenders/file", {
|
||||
globals: {
|
||||
console: {
|
||||
error(...args) {
|
||||
consoleArgs = args;
|
||||
}
|
||||
}
|
||||
},
|
||||
requires: {
|
||||
streamroller: {
|
||||
RollingFileStream
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
fileAppender.configure(
|
||||
{ filename: "test1.log", maxLogSize: 100 },
|
||||
{ basicLayout() {} }
|
||||
);
|
||||
errorHandler({ error: "aargh" });
|
||||
|
||||
fileAppender.configure({ filename: 'test1.log', maxLogSize: 100 }, { basicLayout: function () {} });
|
||||
errorHandler({ error: 'aargh' });
|
||||
|
||||
t.test('should log the error to console.error', (assert) => {
|
||||
t.test("should log the error to console.error", assert => {
|
||||
assert.ok(consoleArgs);
|
||||
assert.equal(consoleArgs[0], 'log4js.fileAppender - Writing to file %s, error happened ');
|
||||
assert.equal(consoleArgs[1], 'test1.log');
|
||||
assert.equal(consoleArgs[2].error, 'aargh');
|
||||
assert.equal(
|
||||
consoleArgs[0],
|
||||
"log4js.fileAppender - Writing to file %s, error happened "
|
||||
);
|
||||
assert.equal(consoleArgs[1], "test1.log");
|
||||
assert.equal(consoleArgs[2].error, "aargh");
|
||||
assert.end();
|
||||
});
|
||||
t.end();
|
||||
|
||||
@ -1,10 +1,8 @@
'use strict';

const test = require('tap').test;
const fs = require('fs');
const path = require('path');
const EOL = require('os').EOL || '\n';
const log4js = require('../../lib/log4js');
const { test } = require("tap");
const fs = require("fs");
const path = require("path");
const EOL = require("os").EOL || "\n";
const log4js = require("../../lib/log4js");

function remove(filename) {
try {
@ -14,10 +12,10 @@ function remove(filename) {
}
}

test('log4js fileSyncAppender', (batch) => {
batch.test('with default fileSyncAppender settings', (t) => {
const testFile = path.join(__dirname, '/fa-default-sync-test.log');
const logger = log4js.getLogger('default-settings');
test("log4js fileSyncAppender", batch => {
batch.test("with default fileSyncAppender settings", t => {
const testFile = path.join(__dirname, "/fa-default-sync-test.log");
const logger = log4js.getLogger("default-settings");
remove(testFile);

t.tearDown(() => {
@ -25,13 +23,13 @@ test('log4js fileSyncAppender', (batch) => {
});

log4js.configure({
|
||||
appenders: { sync: { type: 'fileSync', filename: testFile } },
|
||||
categories: { default: { appenders: ['sync'], level: 'debug' } }
|
||||
appenders: { sync: { type: "fileSync", filename: testFile } },
|
||||
categories: { default: { appenders: ["sync"], level: "debug" } }
|
||||
});
|
||||
|
||||
logger.info('This should be in the file.');
|
||||
logger.info("This should be in the file.");
|
||||
|
||||
fs.readFile(testFile, 'utf8', (err, fileContents) => {
|
||||
fs.readFile(testFile, "utf8", (err, fileContents) => {
|
||||
t.include(fileContents, `This should be in the file.${EOL}`);
|
||||
t.match(
|
||||
fileContents,
|
||||
@ -41,9 +39,9 @@ test('log4js fileSyncAppender', (batch) => {
|
||||
});
|
||||
});
|
||||
|
||||
batch.test('with a max file size and no backups', (t) => {
|
||||
const testFile = path.join(__dirname, '/fa-maxFileSize-sync-test.log');
|
||||
const logger = log4js.getLogger('max-file-size');
|
||||
batch.test("with a max file size and no backups", t => {
|
||||
const testFile = path.join(__dirname, "/fa-maxFileSize-sync-test.log");
|
||||
const logger = log4js.getLogger("max-file-size");
|
||||
|
||||
remove(testFile);
|
||||
remove(`${testFile}.1`);
|
||||
@ -57,26 +55,34 @@ test('log4js fileSyncAppender', (batch) => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
sync: {
|
||||
type: 'fileSync', filename: testFile, maxLogSize: 100, backups: 0
|
||||
type: "fileSync",
|
||||
filename: testFile,
|
||||
maxLogSize: 100,
|
||||
backups: 0
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['sync'], level: 'debug' } }
|
||||
categories: { default: { appenders: ["sync"], level: "debug" } }
|
||||
});
|
||||
logger.info('This is the first log message.');
|
||||
logger.info('This is an intermediate log message.');
|
||||
logger.info('This is the second log message.');
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is an intermediate log message.");
|
||||
logger.info("This is the second log message.");
|
||||
|
||||
t.test('log file should only contain the second message', (assert) => {
|
||||
fs.readFile(testFile, 'utf8', (err, fileContents) => {
|
||||
t.test("log file should only contain the second message", assert => {
|
||||
fs.readFile(testFile, "utf8", (err, fileContents) => {
|
||||
assert.include(fileContents, `This is the second log message.${EOL}`);
|
||||
assert.equal(fileContents.indexOf('This is the first log message.'), -1);
|
||||
assert.equal(
|
||||
fileContents.indexOf("This is the first log message."),
|
||||
-1
|
||||
);
|
||||
assert.end();
|
||||
});
|
||||
});
|
||||
|
||||
t.test('there should be two test files', (assert) => {
|
||||
t.test("there should be two test files", assert => {
|
||||
fs.readdir(__dirname, (err, files) => {
|
||||
const logFiles = files.filter(file => file.includes('fa-maxFileSize-sync-test.log'));
|
||||
const logFiles = files.filter(file =>
|
||||
file.includes("fa-maxFileSize-sync-test.log")
|
||||
);
|
||||
assert.equal(logFiles.length, 2);
|
||||
assert.end();
|
||||
});
|
||||
@ -84,9 +90,9 @@ test('log4js fileSyncAppender', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('with a max file size in unit mode and no backups', (t) => {
|
||||
const testFile = path.join(__dirname, '/fa-maxFileSize-unit-sync-test.log');
|
||||
const logger = log4js.getLogger('max-file-size-unit');
|
||||
batch.test("with a max file size in unit mode and no backups", t => {
|
||||
const testFile = path.join(__dirname, "/fa-maxFileSize-unit-sync-test.log");
|
||||
const logger = log4js.getLogger("max-file-size-unit");
|
||||
|
||||
remove(testFile);
|
||||
remove(`${testFile}.1`);
|
||||
@ -100,29 +106,37 @@ test('log4js fileSyncAppender', (batch) => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
sync: {
|
||||
type: 'fileSync', filename: testFile, maxLogSize: '1K', backups: 0
|
||||
type: "fileSync",
|
||||
filename: testFile,
|
||||
maxLogSize: "1K",
|
||||
backups: 0
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['sync'], level: 'debug' } }
|
||||
categories: { default: { appenders: ["sync"], level: "debug" } }
|
||||
});
|
||||
const maxLine = 13;
|
||||
for (let i = 0; i < maxLine; i++) {
|
||||
logger.info('This is the first log message.');
|
||||
logger.info("This is the first log message.");
|
||||
}
|
||||
|
||||
logger.info('This is the second log message.');
|
||||
logger.info("This is the second log message.");
|
||||
|
||||
t.test('log file should only contain the second message', (assert) => {
|
||||
fs.readFile(testFile, 'utf8', (err, fileContents) => {
|
||||
t.test("log file should only contain the second message", assert => {
|
||||
fs.readFile(testFile, "utf8", (err, fileContents) => {
|
||||
assert.include(fileContents, `This is the second log message.${EOL}`);
|
||||
assert.equal(fileContents.indexOf('This is the first log message.'), -1);
|
||||
assert.equal(
|
||||
fileContents.indexOf("This is the first log message."),
|
||||
-1
|
||||
);
|
||||
assert.end();
|
||||
});
|
||||
});
|
||||
|
||||
t.test('there should be two test files', (assert) => {
|
||||
t.test("there should be two test files", assert => {
|
||||
fs.readdir(__dirname, (err, files) => {
|
||||
const logFiles = files.filter(file => file.includes('fa-maxFileSize-unit-sync-test.log'));
|
||||
const logFiles = files.filter(file =>
|
||||
file.includes("fa-maxFileSize-unit-sync-test.log")
|
||||
);
|
||||
assert.equal(logFiles.length, 2);
|
||||
assert.end();
|
||||
});
|
||||
@ -130,9 +144,12 @@ test('log4js fileSyncAppender', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('with a max file size and 2 backups', (t) => {
|
||||
const testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-sync-test.log');
|
||||
const logger = log4js.getLogger('max-file-size-backups');
|
||||
batch.test("with a max file size and 2 backups", t => {
|
||||
const testFile = path.join(
|
||||
__dirname,
|
||||
"/fa-maxFileSize-with-backups-sync-test.log"
|
||||
);
|
||||
const logger = log4js.getLogger("max-file-size-backups");
|
||||
remove(testFile);
|
||||
remove(`${testFile}.1`);
|
||||
remove(`${testFile}.2`);
|
||||
@ -147,89 +164,110 @@ test('log4js fileSyncAppender', (batch) => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
sync: {
|
||||
type: 'fileSync', filename: testFile, maxLogSize: 50, backups: 2
|
||||
type: "fileSync",
|
||||
filename: testFile,
|
||||
maxLogSize: 50,
|
||||
backups: 2
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['sync'], level: 'debug' } }
|
||||
categories: { default: { appenders: ["sync"], level: "debug" } }
|
||||
});
|
||||
logger.info('This is the first log message.');
|
||||
logger.info('This is the second log message.');
|
||||
logger.info('This is the third log message.');
|
||||
logger.info('This is the fourth log message.');
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is the second log message.");
|
||||
logger.info("This is the third log message.");
|
||||
logger.info("This is the fourth log message.");
|
||||
|
||||
t.test('the log files', (assert) => {
|
||||
t.test("the log files", assert => {
|
||||
assert.plan(5);
|
||||
fs.readdir(__dirname, (err, files) => {
|
||||
const logFiles = files.filter(file => file.includes('fa-maxFileSize-with-backups-sync-test.log'));
|
||||
assert.equal(logFiles.length, 3, 'should be 3 files');
|
||||
assert.same(logFiles, [
|
||||
'fa-maxFileSize-with-backups-sync-test.log',
|
||||
'fa-maxFileSize-with-backups-sync-test.log.1',
|
||||
'fa-maxFileSize-with-backups-sync-test.log.2'
|
||||
], 'should be named in sequence');
|
||||
const logFiles = files.filter(file =>
|
||||
file.includes("fa-maxFileSize-with-backups-sync-test.log")
|
||||
);
|
||||
assert.equal(logFiles.length, 3, "should be 3 files");
|
||||
assert.same(
|
||||
logFiles,
|
||||
[
|
||||
"fa-maxFileSize-with-backups-sync-test.log",
|
||||
"fa-maxFileSize-with-backups-sync-test.log.1",
|
||||
"fa-maxFileSize-with-backups-sync-test.log.2"
|
||||
],
|
||||
"should be named in sequence"
|
||||
);
|
||||
|
||||
fs.readFile(path.join(__dirname, logFiles[0]), 'utf8', (e, contents) => {
|
||||
assert.include(contents, 'This is the fourth log message.');
|
||||
});
|
||||
fs.readFile(path.join(__dirname, logFiles[1]), 'utf8', (e, contents) => {
|
||||
assert.include(contents, 'This is the third log message.');
|
||||
});
|
||||
fs.readFile(path.join(__dirname, logFiles[2]), 'utf8', (e, contents) => {
|
||||
assert.include(contents, 'This is the second log message.');
|
||||
});
|
||||
fs.readFile(
|
||||
path.join(__dirname, logFiles[0]),
|
||||
"utf8",
|
||||
(e, contents) => {
|
||||
assert.include(contents, "This is the fourth log message.");
|
||||
}
|
||||
);
|
||||
fs.readFile(
|
||||
path.join(__dirname, logFiles[1]),
|
||||
"utf8",
|
||||
(e, contents) => {
|
||||
assert.include(contents, "This is the third log message.");
|
||||
}
|
||||
);
|
||||
fs.readFile(
|
||||
path.join(__dirname, logFiles[2]),
|
||||
"utf8",
|
||||
(e, contents) => {
|
||||
assert.include(contents, "This is the second log message.");
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('configure with fileSyncAppender', (t) => {
|
||||
batch.test("configure with fileSyncAppender", t => {
|
||||
// this config defines one file appender (to ./tmp-sync-tests.log)
|
||||
// and sets the log level for "tests" to WARN
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
sync: {
|
||||
type: 'fileSync',
|
||||
filename: 'tmp-sync-tests.log',
|
||||
layout: { type: 'messagePassThrough' }
|
||||
type: "fileSync",
|
||||
filename: "tmp-sync-tests.log",
|
||||
layout: { type: "messagePassThrough" }
|
||||
}
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['sync'], level: 'debug' },
|
||||
tests: { appenders: ['sync'], level: 'warn' }
|
||||
default: { appenders: ["sync"], level: "debug" },
|
||||
tests: { appenders: ["sync"], level: "warn" }
|
||||
}
|
||||
});
|
||||
const logger = log4js.getLogger('tests');
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
const logger = log4js.getLogger("tests");
|
||||
logger.info("this should not be written to the file");
|
||||
logger.warn("this should be written to the file");
|
||||
|
||||
fs.readFile('tmp-sync-tests.log', 'utf8', (err, contents) => {
|
||||
fs.readFile("tmp-sync-tests.log", "utf8", (err, contents) => {
|
||||
t.include(contents, `this should be written to the file${EOL}`);
|
||||
t.equal(contents.indexOf('this should not be written to the file'), -1);
|
||||
t.equal(contents.indexOf("this should not be written to the file"), -1);
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
|
||||
batch.test('test options', (t) => {
|
||||
batch.test("test options", t => {
|
||||
// using non-standard options
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
sync: {
|
||||
type: 'fileSync',
|
||||
filename: 'tmp-options-tests.log',
|
||||
layout: { type: 'messagePassThrough' },
|
||||
flags: 'w',
|
||||
encoding: 'ascii',
|
||||
type: "fileSync",
|
||||
filename: "tmp-options-tests.log",
|
||||
layout: { type: "messagePassThrough" },
|
||||
flags: "w",
|
||||
encoding: "ascii",
|
||||
mode: 0o666
|
||||
}
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['sync'], level: 'info' }
|
||||
default: { appenders: ["sync"], level: "info" }
|
||||
}
|
||||
});
|
||||
const logger = log4js.getLogger();
|
||||
logger.warn('log message');
|
||||
logger.warn("log message");
|
||||
|
||||
fs.readFile('tmp-options-tests.log', 'ascii', (err, contents) => {
|
||||
fs.readFile("tmp-options-tests.log", "ascii", (err, contents) => {
|
||||
t.include(contents, `log message${EOL}`);
|
||||
t.end();
|
||||
});
|
||||
|
||||
File diff suppressed because it is too large
@ -1,8 +1,8 @@
const test = require('tap').test;
const { test } = require("tap");

test('Accessing things setup in configure before configure is called', (batch) => {
batch.test('should work', (t) => {
const log4js = require('../../lib/log4js');
test("Accessing things setup in configure before configure is called", batch => {
batch.test("should work", t => {
const log4js = require("../../lib/log4js");
t.ok(log4js.levels);
t.ok(log4js.connectLogger);
t.end();

@ -1,40 +1,38 @@
'use strict';

const test = require('tap').test;
const levels = require('../../lib/levels');
const { test } = require("tap");
const levels = require("../../lib/levels");

function assertThat(assert, level) {
function assertForEach(assertion, testFn, otherLevels) {
otherLevels.forEach((other) => {
otherLevels.forEach(other => {
assertion.call(assert, testFn.call(level, other));
});
}

return {
isLessThanOrEqualTo: function (lvls) {
isLessThanOrEqualTo(lvls) {
assertForEach(assert.ok, level.isLessThanOrEqualTo, lvls);
},
isNotLessThanOrEqualTo: function (lvls) {
isNotLessThanOrEqualTo(lvls) {
assertForEach(assert.notOk, level.isLessThanOrEqualTo, lvls);
},
isGreaterThanOrEqualTo: function (lvls) {
isGreaterThanOrEqualTo(lvls) {
assertForEach(assert.ok, level.isGreaterThanOrEqualTo, lvls);
},
isNotGreaterThanOrEqualTo: function (lvls) {
isNotGreaterThanOrEqualTo(lvls) {
assertForEach(assert.notOk, level.isGreaterThanOrEqualTo, lvls);
},
isEqualTo: function (lvls) {
isEqualTo(lvls) {
assertForEach(assert.ok, level.isEqualTo, lvls);
},
isNotEqualTo: function (lvls) {
isNotEqualTo(lvls) {
assertForEach(assert.notOk, level.isEqualTo, lvls);
}
};
}

test('levels', (batch) => {
|
||||
batch.test('values', (t) => {
|
||||
t.test('should define some levels', (assert) => {
|
||||
test("levels", batch => {
|
||||
batch.test("values", t => {
|
||||
t.test("should define some levels", assert => {
|
||||
assert.ok(levels.ALL);
|
||||
assert.ok(levels.TRACE);
|
||||
assert.ok(levels.DEBUG);
|
||||
@ -47,7 +45,7 @@ test('levels', (batch) => {
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('ALL', (assert) => {
|
||||
t.test("ALL", assert => {
|
||||
const all = levels.ALL;
|
||||
assertThat(assert, all).isLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
@ -70,7 +68,7 @@ test('levels', (batch) => {
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(assert, all).isEqualTo([levels.getLevel('ALL')]);
|
||||
assertThat(assert, all).isEqualTo([levels.getLevel("ALL")]);
|
||||
assertThat(assert, all).isNotEqualTo([
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
@ -84,7 +82,7 @@ test('levels', (batch) => {
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('TRACE', (assert) => {
|
||||
t.test("TRACE", assert => {
|
||||
const trace = levels.TRACE;
|
||||
assertThat(assert, trace).isLessThanOrEqualTo([
|
||||
levels.DEBUG,
|
||||
@ -96,7 +94,10 @@ test('levels', (batch) => {
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(assert, trace).isNotLessThanOrEqualTo([levels.ALL]);
|
||||
assertThat(assert, trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
assertThat(assert, trace).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE
|
||||
]);
|
||||
assertThat(assert, trace).isNotGreaterThanOrEqualTo([
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
@ -106,7 +107,7 @@ test('levels', (batch) => {
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(assert, trace).isEqualTo([levels.getLevel('TRACE')]);
|
||||
assertThat(assert, trace).isEqualTo([levels.getLevel("TRACE")]);
|
||||
assertThat(assert, trace).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.DEBUG,
|
||||
@ -120,7 +121,7 @@ test('levels', (batch) => {
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('DEBUG', (assert) => {
|
||||
t.test("DEBUG", assert => {
|
||||
const debug = levels.DEBUG;
|
||||
assertThat(assert, debug).isLessThanOrEqualTo([
|
||||
levels.INFO,
|
||||
@ -130,8 +131,14 @@ test('levels', (batch) => {
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(assert, debug).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
assertThat(assert, debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
assertThat(assert, debug).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE
|
||||
]);
|
||||
assertThat(assert, debug).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE
|
||||
]);
|
||||
assertThat(assert, debug).isNotGreaterThanOrEqualTo([
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
@ -140,7 +147,7 @@ test('levels', (batch) => {
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(assert, debug).isEqualTo([levels.getLevel('DEBUG')]);
|
||||
assertThat(assert, debug).isEqualTo([levels.getLevel("DEBUG")]);
|
||||
assertThat(assert, debug).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
@ -154,7 +161,7 @@ test('levels', (batch) => {
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('INFO', (assert) => {
|
||||
t.test("INFO", assert => {
|
||||
const info = levels.INFO;
|
||||
assertThat(assert, info).isLessThanOrEqualTo([
|
||||
levels.WARN,
|
||||
@ -163,8 +170,16 @@ test('levels', (batch) => {
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(assert, info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
|
||||
assertThat(assert, info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
|
||||
assertThat(assert, info).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG
|
||||
]);
|
||||
assertThat(assert, info).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG
|
||||
]);
|
||||
assertThat(assert, info).isNotGreaterThanOrEqualTo([
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
@ -172,7 +187,7 @@ test('levels', (batch) => {
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(assert, info).isEqualTo([levels.getLevel('INFO')]);
|
||||
assertThat(assert, info).isEqualTo([levels.getLevel("INFO")]);
|
||||
assertThat(assert, info).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
@ -186,9 +201,14 @@ test('levels', (batch) => {
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('WARN', (assert) => {
|
||||
t.test("WARN", assert => {
|
||||
const warn = levels.WARN;
|
||||
assertThat(assert, warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.MARK, levels.OFF]);
|
||||
assertThat(assert, warn).isLessThanOrEqualTo([
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(assert, warn).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
@ -202,9 +222,12 @@ test('levels', (batch) => {
|
||||
levels.INFO
|
||||
]);
|
||||
assertThat(assert, warn).isNotGreaterThanOrEqualTo([
|
||||
levels.ERROR, levels.FATAL, levels.MARK, levels.OFF
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(assert, warn).isEqualTo([levels.getLevel('WARN')]);
|
||||
assertThat(assert, warn).isEqualTo([levels.getLevel("WARN")]);
|
||||
assertThat(assert, warn).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
@ -217,9 +240,13 @@ test('levels', (batch) => {
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('ERROR', (assert) => {
|
||||
t.test("ERROR", assert => {
|
||||
const error = levels.ERROR;
|
||||
assertThat(assert, error).isLessThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
|
||||
assertThat(assert, error).isLessThanOrEqualTo([
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(assert, error).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
@ -234,8 +261,12 @@ test('levels', (batch) => {
|
||||
levels.INFO,
|
||||
levels.WARN
|
||||
]);
|
||||
assertThat(assert, error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
|
||||
assertThat(assert, error).isEqualTo([levels.getLevel('ERROR')]);
|
||||
assertThat(assert, error).isNotGreaterThanOrEqualTo([
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(assert, error).isEqualTo([levels.getLevel("ERROR")]);
|
||||
assertThat(assert, error).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
@ -249,7 +280,7 @@ test('levels', (batch) => {
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('FATAL', (assert) => {
|
||||
t.test("FATAL", assert => {
|
||||
const fatal = levels.FATAL;
|
||||
assertThat(assert, fatal).isLessThanOrEqualTo([levels.MARK, levels.OFF]);
|
||||
assertThat(assert, fatal).isNotLessThanOrEqualTo([
|
||||
@ -268,8 +299,11 @@ test('levels', (batch) => {
|
||||
levels.WARN,
|
||||
levels.ERROR
|
||||
]);
|
||||
assertThat(assert, fatal).isNotGreaterThanOrEqualTo([levels.MARK, levels.OFF]);
|
||||
assertThat(assert, fatal).isEqualTo([levels.getLevel('FATAL')]);
|
||||
assertThat(assert, fatal).isNotGreaterThanOrEqualTo([
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(assert, fatal).isEqualTo([levels.getLevel("FATAL")]);
|
||||
assertThat(assert, fatal).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
@ -283,7 +317,7 @@ test('levels', (batch) => {
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('MARK', (assert) => {
|
||||
t.test("MARK", assert => {
|
||||
const mark = levels.MARK;
|
||||
assertThat(assert, mark).isLessThanOrEqualTo([levels.OFF]);
|
||||
assertThat(assert, mark).isNotLessThanOrEqualTo([
|
||||
@ -305,7 +339,7 @@ test('levels', (batch) => {
|
||||
levels.FATAL
|
||||
]);
|
||||
assertThat(assert, mark).isNotGreaterThanOrEqualTo([levels.OFF]);
|
||||
assertThat(assert, mark).isEqualTo([levels.getLevel('MARK')]);
|
||||
assertThat(assert, mark).isEqualTo([levels.getLevel("MARK")]);
|
||||
assertThat(assert, mark).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
@ -319,7 +353,7 @@ test('levels', (batch) => {
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('OFF', (assert) => {
|
||||
t.test("OFF", assert => {
|
||||
const off = levels.OFF;
|
||||
assertThat(assert, off).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
@ -341,7 +375,7 @@ test('levels', (batch) => {
|
||||
levels.FATAL,
|
||||
levels.MARK
|
||||
]);
|
||||
assertThat(assert, off).isEqualTo([levels.getLevel('OFF')]);
|
||||
assertThat(assert, off).isEqualTo([levels.getLevel("OFF")]);
|
||||
assertThat(assert, off).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
@ -357,33 +391,48 @@ test('levels', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('isGreaterThanOrEqualTo', (t) => {
|
||||
batch.test("isGreaterThanOrEqualTo", t => {
|
||||
const info = levels.INFO;
|
||||
assertThat(t, info).isGreaterThanOrEqualTo(['all', 'trace', 'debug']);
|
||||
assertThat(t, info).isNotGreaterThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'MARK', 'off']);
|
||||
assertThat(t, info).isGreaterThanOrEqualTo(["all", "trace", "debug"]);
|
||||
assertThat(t, info).isNotGreaterThanOrEqualTo([
|
||||
"warn",
|
||||
"ERROR",
|
||||
"Fatal",
|
||||
"MARK",
|
||||
"off"
|
||||
]);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('isLessThanOrEqualTo', (t) => {
|
||||
batch.test("isLessThanOrEqualTo", t => {
|
||||
const info = levels.INFO;
|
||||
assertThat(t, info).isNotLessThanOrEqualTo(['all', 'trace', 'debug']);
|
||||
assertThat(t, info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'MARK', 'off']);
|
||||
assertThat(t, info).isNotLessThanOrEqualTo(["all", "trace", "debug"]);
|
||||
assertThat(t, info).isLessThanOrEqualTo([
|
||||
"warn",
|
||||
"ERROR",
|
||||
"Fatal",
|
||||
"MARK",
|
||||
"off"
|
||||
]);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('isEqualTo', (t) => {
|
||||
batch.test("isEqualTo", t => {
|
||||
const info = levels.INFO;
|
||||
assertThat(t, info).isEqualTo(['info', 'INFO', 'iNfO']);
|
||||
assertThat(t, info).isEqualTo(["info", "INFO", "iNfO"]);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('getLevel', (t) => {
|
||||
t.equal(levels.getLevel('debug'), levels.DEBUG);
|
||||
t.equal(levels.getLevel('DEBUG'), levels.DEBUG);
|
||||
t.equal(levels.getLevel('DeBuG'), levels.DEBUG);
|
||||
t.notOk(levels.getLevel('cheese'));
|
||||
t.equal(levels.getLevel('cheese', levels.DEBUG), levels.DEBUG);
|
||||
t.equal(levels.getLevel({ level: 10000, levelStr: 'DEBUG', colour: 'cyan' }), levels.DEBUG);
|
||||
batch.test("getLevel", t => {
|
||||
t.equal(levels.getLevel("debug"), levels.DEBUG);
|
||||
t.equal(levels.getLevel("DEBUG"), levels.DEBUG);
|
||||
t.equal(levels.getLevel("DeBuG"), levels.DEBUG);
|
||||
t.notOk(levels.getLevel("cheese"));
|
||||
t.equal(levels.getLevel("cheese", levels.DEBUG), levels.DEBUG);
|
||||
t.equal(
|
||||
levels.getLevel({ level: 10000, levelStr: "DEBUG", colour: "cyan" }),
|
||||
levels.DEBUG
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
|
||||
@ -1,10 +1,8 @@
|
||||
'use strict';
|
||||
const { test } = require("tap");
|
||||
const fs = require("fs");
|
||||
const os = require("os");
|
||||
|
||||
const test = require('tap').test;
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
|
||||
const EOL = os.EOL || '\n';
|
||||
const EOL = os.EOL || "\n";
|
||||
|
||||
function remove(filename) {
|
||||
try {
|
||||
@ -14,40 +12,47 @@ function remove(filename) {
|
||||
}
|
||||
}
|
||||
|
||||
test('log4js logLevelFilter', (batch) => {
|
||||
batch.test('appender', (t) => {
|
||||
const log4js = require('../../lib/log4js');
|
||||
const recording = require('../../lib/appenders/recording');
|
||||
test("log4js logLevelFilter", batch => {
|
||||
batch.test("appender", t => {
|
||||
const log4js = require("../../lib/log4js");
|
||||
const recording = require("../../lib/appenders/recording");
|
||||
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
recorder: { type: 'recording' },
|
||||
filtered: { type: 'logLevelFilter', appender: 'recorder', level: 'ERROR' }
|
||||
recorder: { type: "recording" },
|
||||
filtered: {
|
||||
type: "logLevelFilter",
|
||||
appender: "recorder",
|
||||
level: "ERROR"
|
||||
}
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['filtered'], level: 'debug' }
|
||||
default: { appenders: ["filtered"], level: "debug" }
|
||||
}
|
||||
});
|
||||
|
||||
const logger = log4js.getLogger('logLevelTest');
|
||||
logger.debug('this should not trigger an event');
|
||||
logger.warn('neither should this');
|
||||
logger.error('this should, though');
|
||||
logger.fatal('so should this');
|
||||
const logger = log4js.getLogger("logLevelTest");
|
||||
logger.debug("this should not trigger an event");
|
||||
logger.warn("neither should this");
|
||||
logger.error("this should, though");
|
||||
logger.fatal("so should this");
|
||||
|
||||
const logEvents = recording.replay();
|
||||
|
||||
t.test('should only pass log events greater than or equal to its own level', (assert) => {
|
||||
assert.equal(logEvents.length, 2);
|
||||
assert.equal(logEvents[0].data[0], 'this should, though');
|
||||
assert.equal(logEvents[1].data[0], 'so should this');
|
||||
assert.end();
|
||||
});
|
||||
t.test(
|
||||
"should only pass log events greater than or equal to its own level",
|
||||
assert => {
|
||||
assert.equal(logEvents.length, 2);
|
||||
assert.equal(logEvents[0].data[0], "this should, though");
|
||||
assert.equal(logEvents[1].data[0], "so should this");
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('configure', (t) => {
|
||||
const log4js = require('../../lib/log4js');
|
||||
batch.test("configure", t => {
|
||||
const log4js = require("../../lib/log4js");
|
||||
|
||||
remove(`${__dirname}/logLevelFilter.log`);
|
||||
remove(`${__dirname}/logLevelFilter-warnings.log`);
|
||||
@ -61,69 +66,94 @@ test('log4js logLevelFilter', (batch) => {
|
||||
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
'warning-file': {
|
||||
type: 'file',
|
||||
filename: 'test/tap/logLevelFilter-warnings.log',
|
||||
layout: { type: 'messagePassThrough' }
|
||||
"warning-file": {
|
||||
type: "file",
|
||||
filename: "test/tap/logLevelFilter-warnings.log",
|
||||
layout: { type: "messagePassThrough" }
|
||||
},
|
||||
warnings: {
|
||||
type: 'logLevelFilter',
|
||||
level: 'WARN',
|
||||
appender: 'warning-file'
|
||||
type: "logLevelFilter",
|
||||
level: "WARN",
|
||||
appender: "warning-file"
|
||||
},
|
||||
'debug-file': {
|
||||
type: 'file',
|
||||
filename: 'test/tap/logLevelFilter-debugs.log',
|
||||
layout: { type: 'messagePassThrough' }
|
||||
"debug-file": {
|
||||
type: "file",
|
||||
filename: "test/tap/logLevelFilter-debugs.log",
|
||||
layout: { type: "messagePassThrough" }
|
||||
},
|
||||
debugs: {
|
||||
type: 'logLevelFilter',
|
||||
level: 'TRACE',
|
||||
maxLevel: 'DEBUG',
|
||||
appender: 'debug-file'
|
||||
type: "logLevelFilter",
|
||||
level: "TRACE",
|
||||
maxLevel: "DEBUG",
|
||||
appender: "debug-file"
|
||||
},
|
||||
tests: {
|
||||
type: 'file',
|
||||
filename: 'test/tap/logLevelFilter.log',
|
||||
type: "file",
|
||||
filename: "test/tap/logLevelFilter.log",
|
||||
layout: {
|
||||
type: 'messagePassThrough'
|
||||
type: "messagePassThrough"
|
||||
}
|
||||
}
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['tests', 'warnings', 'debugs'], level: 'trace' }
|
||||
default: { appenders: ["tests", "warnings", "debugs"], level: "trace" }
|
||||
}
|
||||
});
|
||||
const logger = log4js.getLogger('tests');
|
||||
logger.debug('debug');
|
||||
logger.info('info');
|
||||
logger.error('error');
|
||||
logger.warn('warn');
|
||||
logger.debug('debug');
|
||||
logger.trace('trace');
|
||||
const logger = log4js.getLogger("tests");
|
||||
logger.debug("debug");
|
||||
logger.info("info");
|
||||
logger.error("error");
|
||||
logger.warn("warn");
|
||||
logger.debug("debug");
|
||||
logger.trace("trace");
|
||||
// wait for the file system to catch up
|
||||
setTimeout(() => {
|
||||
t.test('tmp-tests.log should contain all log messages', (assert) => {
|
||||
fs.readFile(`${__dirname}/logLevelFilter.log`, 'utf8', (err, contents) => {
|
||||
const messages = contents.trim().split(EOL);
|
||||
assert.same(messages, ['debug', 'info', 'error', 'warn', 'debug', 'trace']);
|
||||
assert.end();
|
||||
});
|
||||
});
|
||||
t.test('tmp-tests-warnings.log should contain only error and warning logs', (assert) => {
|
||||
fs.readFile(`${__dirname}/logLevelFilter-warnings.log`, 'utf8', (err, contents) => {
|
||||
const messages = contents.trim().split(EOL);
|
||||
assert.deepEqual(messages, ['error', 'warn']);
|
||||
assert.end();
|
||||
});
|
||||
});
|
||||
t.test('tmp-tests-debugs.log should contain only trace and debug logs', (assert) => {
|
||||
fs.readFile(`${__dirname}/logLevelFilter-debugs.log`, 'utf8', (err, contents) => {
|
||||
const messages = contents.trim().split(EOL);
|
||||
assert.deepEqual(messages, ['debug', 'debug', 'trace']);
|
||||
assert.end();
|
||||
});
|
||||
t.test("tmp-tests.log should contain all log messages", assert => {
|
||||
fs.readFile(
|
||||
`${__dirname}/logLevelFilter.log`,
|
||||
"utf8",
|
||||
(err, contents) => {
|
||||
const messages = contents.trim().split(EOL);
|
||||
assert.same(messages, [
|
||||
"debug",
|
||||
"info",
|
||||
"error",
|
||||
"warn",
|
||||
"debug",
|
||||
"trace"
|
||||
]);
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
});
|
||||
t.test(
|
||||
"tmp-tests-warnings.log should contain only error and warning logs",
|
||||
assert => {
|
||||
fs.readFile(
|
||||
`${__dirname}/logLevelFilter-warnings.log`,
|
||||
"utf8",
|
||||
(err, contents) => {
|
||||
const messages = contents.trim().split(EOL);
|
||||
assert.deepEqual(messages, ["error", "warn"]);
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
}
|
||||
);
|
||||
t.test(
|
||||
"tmp-tests-debugs.log should contain only trace and debug logs",
|
||||
assert => {
|
||||
fs.readFile(
|
||||
`${__dirname}/logLevelFilter-debugs.log`,
|
||||
"utf8",
|
||||
(err, contents) => {
|
||||
const messages = contents.trim().split(EOL);
|
||||
assert.deepEqual(messages, ["debug", "debug", "trace"]);
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
}
|
||||
);
|
||||
t.end();
|
||||
}, 500);
|
||||
});
|
||||
|
||||
@ -1,76 +1,75 @@
'use strict';

const test = require('tap').test;
const debug = require('debug')('log4js:test.logger');
const sandbox = require('@log4js-node/sandboxed-module');
const callsites = require('callsites');
const levels = require('../../lib/levels');
const { test } = require("tap");
const debug = require("debug")("log4js:test.logger");
const sandbox = require("@log4js-node/sandboxed-module");
const callsites = require("callsites");
const levels = require("../../lib/levels");

const events = [];
|
||||
const Logger = sandbox.require(
|
||||
'../../lib/logger',
|
||||
{
|
||||
requires: {
|
||||
'./levels': levels,
|
||||
'./clustering': {
|
||||
isMaster: () => true,
|
||||
onlyOnMaster: fn => fn(),
|
||||
send: (evt) => {
|
||||
debug('fake clustering got event:', evt);
|
||||
events.push(evt);
|
||||
}
|
||||
const Logger = sandbox.require("../../lib/logger", {
|
||||
requires: {
|
||||
"./levels": levels,
|
||||
"./clustering": {
|
||||
isMaster: () => true,
|
||||
onlyOnMaster: fn => fn(),
|
||||
send: evt => {
|
||||
debug("fake clustering got event:", evt);
|
||||
events.push(evt);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
const testConfig = {
|
||||
level: levels.TRACE
|
||||
};
|
||||
|
||||
test('../../lib/logger', (batch) => {
|
||||
batch.beforeEach((done) => {
|
||||
test("../../lib/logger", batch => {
|
||||
batch.beforeEach(done => {
|
||||
events.length = 0;
|
||||
testConfig.level = levels.TRACE;
|
||||
done();
|
||||
});
|
||||
|
||||
batch.test('constructor with no parameters', (t) => {
|
||||
t.throws(
|
||||
() => new Logger(),
|
||||
new Error('No category provided.')
|
||||
);
|
||||
batch.test("constructor with no parameters", t => {
|
||||
t.throws(() => new Logger(), new Error("No category provided."));
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('constructor with category', (t) => {
|
||||
const logger = new Logger('cheese');
|
||||
t.equal(logger.category, 'cheese', 'should use category');
|
||||
t.equal(logger.level, levels.OFF, 'should use OFF log level');
|
||||
batch.test("constructor with category", t => {
|
||||
const logger = new Logger("cheese");
|
||||
t.equal(logger.category, "cheese", "should use category");
|
||||
t.equal(logger.level, levels.OFF, "should use OFF log level");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('set level should delegate', (t) => {
|
||||
const logger = new Logger('cheese');
|
||||
logger.level = 'debug';
|
||||
t.equal(logger.category, 'cheese', 'should use category');
|
||||
t.equal(logger.level, levels.DEBUG, 'should use level');
|
||||
batch.test("set level should delegate", t => {
|
||||
const logger = new Logger("cheese");
|
||||
logger.level = "debug";
|
||||
t.equal(logger.category, "cheese", "should use category");
|
||||
t.equal(logger.level, levels.DEBUG, "should use level");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('isLevelEnabled', (t) => {
|
||||
const logger = new Logger('cheese');
|
||||
batch.test("isLevelEnabled", t => {
|
||||
const logger = new Logger("cheese");
|
||||
const functions = [
|
||||
'isTraceEnabled', 'isDebugEnabled', 'isInfoEnabled',
|
||||
'isWarnEnabled', 'isErrorEnabled', 'isFatalEnabled'
|
||||
"isTraceEnabled",
|
||||
"isDebugEnabled",
|
||||
"isInfoEnabled",
|
||||
"isWarnEnabled",
|
||||
"isErrorEnabled",
|
||||
"isFatalEnabled"
|
||||
];
|
||||
t.test('should provide a level enabled function for all levels', (subtest) => {
|
||||
subtest.plan(functions.length);
|
||||
functions.forEach((fn) => {
|
||||
subtest.type(logger[fn], 'function');
|
||||
});
|
||||
});
|
||||
logger.level = 'INFO';
|
||||
t.test(
|
||||
"should provide a level enabled function for all levels",
|
||||
subtest => {
|
||||
subtest.plan(functions.length);
|
||||
functions.forEach(fn => {
|
||||
subtest.type(logger[fn], "function");
|
||||
});
|
||||
}
|
||||
);
|
||||
logger.level = "INFO";
|
||||
t.notOk(logger.isTraceEnabled());
|
||||
t.notOk(logger.isDebugEnabled());
|
||||
t.ok(logger.isInfoEnabled());
|
||||
@ -80,56 +79,56 @@ test('../../lib/logger', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should send log events to dispatch function', (t) => {
|
||||
const logger = new Logger('cheese');
|
||||
logger.level = 'debug';
|
||||
logger.debug('Event 1');
|
||||
logger.debug('Event 2');
|
||||
logger.debug('Event 3');
|
||||
batch.test("should send log events to dispatch function", t => {
|
||||
const logger = new Logger("cheese");
|
||||
logger.level = "debug";
|
||||
logger.debug("Event 1");
|
||||
logger.debug("Event 2");
|
||||
logger.debug("Event 3");
|
||||
|
||||
t.equal(events.length, 3);
|
||||
t.equal(events[0].data[0], 'Event 1');
|
||||
t.equal(events[1].data[0], 'Event 2');
|
||||
t.equal(events[2].data[0], 'Event 3');
|
||||
t.equal(events[0].data[0], "Event 1");
|
||||
t.equal(events[1].data[0], "Event 2");
|
||||
t.equal(events[2].data[0], "Event 3");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should add context values to every event', (t) => {
|
||||
const logger = new Logger('fromage');
|
||||
logger.level = 'debug';
|
||||
logger.debug('Event 1');
|
||||
logger.addContext('cheese', 'edam');
|
||||
logger.debug('Event 2');
|
||||
logger.debug('Event 3');
|
||||
logger.addContext('biscuits', 'timtam');
|
||||
logger.debug('Event 4');
|
||||
logger.removeContext('cheese');
|
||||
logger.debug('Event 5');
|
||||
batch.test("should add context values to every event", t => {
|
||||
const logger = new Logger("fromage");
|
||||
logger.level = "debug";
|
||||
logger.debug("Event 1");
|
||||
logger.addContext("cheese", "edam");
|
||||
logger.debug("Event 2");
|
||||
logger.debug("Event 3");
|
||||
logger.addContext("biscuits", "timtam");
|
||||
logger.debug("Event 4");
|
||||
logger.removeContext("cheese");
|
||||
logger.debug("Event 5");
|
||||
logger.clearContext();
|
||||
logger.debug('Event 6');
|
||||
logger.debug("Event 6");
|
||||
|
||||
t.equal(events.length, 6);
|
||||
t.same(events[0].context, {});
|
||||
t.same(events[1].context, { cheese: 'edam' });
|
||||
t.same(events[2].context, { cheese: 'edam' });
|
||||
t.same(events[3].context, { cheese: 'edam', biscuits: 'timtam' });
|
||||
t.same(events[4].context, { biscuits: 'timtam' });
|
||||
t.same(events[1].context, { cheese: "edam" });
|
||||
t.same(events[2].context, { cheese: "edam" });
|
||||
t.same(events[3].context, { cheese: "edam", biscuits: "timtam" });
|
||||
t.same(events[4].context, { biscuits: "timtam" });
|
||||
t.same(events[5].context, {});
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should not break when log data has no toString', (t) => {
|
||||
const logger = new Logger('thing');
|
||||
logger.level = 'debug';
|
||||
logger.info('Just testing ', Object.create(null));
|
||||
batch.test("should not break when log data has no toString", t => {
|
||||
const logger = new Logger("thing");
|
||||
logger.level = "debug";
|
||||
logger.info("Just testing ", Object.create(null));
|
||||
|
||||
t.equal(events.length, 1);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('default should disable useCallStack unless manual enable', (t) => {
|
||||
const logger = new Logger('stack');
|
||||
logger.level = 'debug';
|
||||
batch.test("default should disable useCallStack unless manual enable", t => {
|
||||
const logger = new Logger("stack");
|
||||
logger.level = "debug";
|
||||
|
||||
t.equal(logger.useCallStack, false);
|
||||
|
||||
@ -139,7 +138,7 @@ test('../../lib/logger', (batch) => {
|
||||
logger.useCallStack = 0;
|
||||
t.equal(logger.useCallStack, false);
|
||||
|
||||
logger.useCallStack = '';
|
||||
logger.useCallStack = "";
|
||||
t.equal(logger.useCallStack, false);
|
||||
|
||||
logger.useCallStack = null;
|
||||
@ -148,7 +147,7 @@ test('../../lib/logger', (batch) => {
|
||||
logger.useCallStack = undefined;
|
||||
t.equal(logger.useCallStack, false);
|
||||
|
||||
logger.useCallStack = 'true';
|
||||
logger.useCallStack = "true";
|
||||
t.equal(logger.useCallStack, false);
|
||||
|
||||
logger.useCallStack = true;
|
||||
@ -156,91 +155,94 @@ test('../../lib/logger', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('should correctly switch on/off useCallStack', (t) => {
|
||||
const logger = new Logger('stack');
|
||||
logger.level = 'debug';
|
||||
batch.test("should correctly switch on/off useCallStack", t => {
|
||||
const logger = new Logger("stack");
|
||||
logger.level = "debug";
|
||||
logger.useCallStack = true;
|
||||
t.equal(logger.useCallStack, true);
|
||||
|
||||
logger.info('hello world');
|
||||
logger.info("hello world");
|
||||
const callsite = callsites()[0];
|
||||
|
||||
t.equal(events.length, 1);
|
||||
t.equal(events[0].data[0], 'hello world');
|
||||
t.equal(events[0].data[0], "hello world");
|
||||
t.equal(events[0].fileName, callsite.getFileName());
|
||||
t.equal(events[0].lineNumber, callsite.getLineNumber() - 1);
|
||||
t.equal(events[0].columnNumber, 12);
|
||||
|
||||
logger.useCallStack = false;
|
||||
logger.info('disabled');
|
||||
logger.info("disabled");
|
||||
t.equal(logger.useCallStack, false);
|
||||
t.equal(events[1].data[0], 'disabled');
|
||||
t.equal(events[1].data[0], "disabled");
|
||||
t.equal(events[1].fileName, undefined);
|
||||
t.equal(events[1].lineNumber, undefined);
|
||||
t.equal(events[1].columnNumber, undefined);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('Once switch on/off useCallStack will apply all same category loggers', (t) => {
|
||||
const logger1 = new Logger('stack');
|
||||
logger1.level = 'debug';
|
||||
logger1.useCallStack = true;
|
||||
const logger2 = new Logger('stack');
|
||||
logger2.level = 'debug';
|
||||
batch.test(
|
||||
"Once switch on/off useCallStack will apply all same category loggers",
|
||||
t => {
|
||||
const logger1 = new Logger("stack");
|
||||
logger1.level = "debug";
|
||||
logger1.useCallStack = true;
|
||||
const logger2 = new Logger("stack");
|
||||
logger2.level = "debug";
|
||||
|
||||
logger1.info('hello world');
|
||||
const callsite = callsites()[0];
|
||||
logger1.info("hello world");
|
||||
const callsite = callsites()[0];
|
||||
|
||||
t.equal(logger1.useCallStack, true);
|
||||
t.equal(events.length, 1);
|
||||
t.equal(events[0].data[0], 'hello world');
|
||||
t.equal(events[0].fileName, callsite.getFileName());
|
||||
t.equal(events[0].lineNumber, callsite.getLineNumber() - 1);
|
||||
t.equal(events[0].columnNumber, 13);
|
||||
t.equal(logger1.useCallStack, true);
|
||||
t.equal(events.length, 1);
|
||||
t.equal(events[0].data[0], "hello world");
|
||||
t.equal(events[0].fileName, callsite.getFileName());
|
||||
t.equal(events[0].lineNumber, callsite.getLineNumber() - 1);
|
||||
t.equal(events[0].columnNumber, 15); // col of the '.' in logger1.info(...)
|
||||
|
||||
logger2.info('hello world');
|
||||
const callsite2 = callsites()[0];
|
||||
logger2.info("hello world");
|
||||
const callsite2 = callsites()[0];
|
||||
|
||||
t.equal(logger2.useCallStack, true);
|
||||
t.equal(events[1].data[0], 'hello world');
|
||||
t.equal(events[1].fileName, callsite2.getFileName());
|
||||
t.equal(events[1].lineNumber, callsite2.getLineNumber() - 1);
|
||||
t.equal(events[1].columnNumber, 13);
|
||||
t.equal(logger2.useCallStack, true);
|
||||
t.equal(events[1].data[0], "hello world");
|
||||
t.equal(events[1].fileName, callsite2.getFileName());
|
||||
t.equal(events[1].lineNumber, callsite2.getLineNumber() - 1);
|
||||
t.equal(events[1].columnNumber, 15); // col of the '.' in logger1.info(...)
|
||||
|
||||
logger1.useCallStack = false;
|
||||
logger2.info('hello world');
|
||||
t.equal(logger2.useCallStack, false);
|
||||
t.equal(events[2].data[0], 'hello world');
|
||||
t.equal(events[2].fileName, undefined);
|
||||
t.equal(events[2].lineNumber, undefined);
|
||||
t.equal(events[2].columnNumber, undefined);
|
||||
logger1.useCallStack = false;
|
||||
logger2.info("hello world");
|
||||
t.equal(logger2.useCallStack, false);
|
||||
t.equal(events[2].data[0], "hello world");
|
||||
t.equal(events[2].fileName, undefined);
|
||||
t.equal(events[2].lineNumber, undefined);
|
||||
t.equal(events[2].columnNumber, undefined);
|
||||
|
||||
t.end();
|
||||
});
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
batch.test('should correctly change the parseCallStack function', (t) => {
|
||||
const logger = new Logger('stack');
|
||||
const parseFunction = function () {
|
||||
batch.test("should correctly change the parseCallStack function", t => {
|
||||
const logger = new Logger("stack");
|
||||
const parseFunction = function() {
|
||||
return {
|
||||
functionName: 'test function name',
|
||||
fileName: 'test file name',
|
||||
functionName: "test function name",
|
||||
fileName: "test file name",
|
||||
lineNumber: 15,
|
||||
columnNumber: 25,
|
||||
callStack: 'test callstack',
|
||||
callStack: "test callstack"
|
||||
};
|
||||
};
|
||||
logger.level = 'debug';
|
||||
logger.level = "debug";
|
||||
logger.useCallStack = true;
|
||||
logger.setParseCallStackFunction(parseFunction);
|
||||
|
||||
t.equal(logger.parseCallStack, parseFunction);
|
||||
|
||||
logger.info('test parseCallStack');
|
||||
t.equal(events[0].functionName, 'test function name');
|
||||
t.equal(events[0].fileName, 'test file name');
|
||||
logger.info("test parseCallStack");
|
||||
t.equal(events[0].functionName, "test function name");
|
||||
t.equal(events[0].fileName, "test file name");
|
||||
t.equal(events[0].lineNumber, 15);
|
||||
t.equal(events[0].columnNumber, 25);
|
||||
t.equal(events[0].callStack, 'test callstack');
|
||||
t.equal(events[0].callStack, "test callstack");
|
||||
|
||||
t.end();
|
||||
});
|
||||
|
||||
@ -1,186 +1,220 @@
'use strict';
const { test } = require("tap");
const sandbox = require("@log4js-node/sandboxed-module");
const util = require("util");
const recording = require("../../lib/appenders/recording");

const test = require('tap').test;
const sandbox = require('@log4js-node/sandboxed-module');
const recording = require('../../lib/appenders/recording');

test('log4js', (batch) => {
batch.test('getLogger', (t) => {
const log4js = require('../../lib/log4js');
test("log4js", batch => {
batch.test("getLogger", t => {
const log4js = require("../../lib/log4js");
log4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } }
appenders: { recorder: { type: "recording" } },
categories: { default: { appenders: ["recorder"], level: "DEBUG" } }
});
const logger = log4js.getLogger('tests');
const logger = log4js.getLogger("tests");

t.test('should take a category and return a logger', (assert) => {
|
||||
assert.equal(logger.category, 'tests');
|
||||
assert.equal(logger.level.toString(), 'DEBUG');
|
||||
assert.type(logger.debug, 'function');
|
||||
assert.type(logger.info, 'function');
|
||||
assert.type(logger.warn, 'function');
|
||||
assert.type(logger.error, 'function');
|
||||
assert.type(logger.fatal, 'function');
|
||||
t.test("should take a category and return a logger", assert => {
|
||||
assert.equal(logger.category, "tests");
|
||||
assert.equal(logger.level.toString(), "DEBUG");
|
||||
assert.type(logger.debug, "function");
|
||||
assert.type(logger.info, "function");
|
||||
assert.type(logger.warn, "function");
|
||||
assert.type(logger.error, "function");
|
||||
assert.type(logger.fatal, "function");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('log events', (assert) => {
|
||||
t.test("log events", assert => {
|
||||
recording.reset();
|
||||
|
||||
logger.debug('Debug event');
|
||||
logger.trace('Trace event 1');
|
||||
logger.trace('Trace event 2');
|
||||
logger.warn('Warning event');
|
||||
logger.error('Aargh!', new Error('Pants are on fire!'));
|
||||
logger.error('Simulated CouchDB problem', { err: 127, cause: 'incendiary underwear' });
|
||||
logger.debug("Debug event");
|
||||
logger.trace("Trace event 1");
|
||||
logger.trace("Trace event 2");
|
||||
logger.warn("Warning event");
|
||||
logger.error("Aargh!", new Error("Pants are on fire!"));
|
||||
logger.error("Simulated CouchDB problem", {
|
||||
err: 127,
|
||||
cause: "incendiary underwear"
|
||||
});
|
||||
|
||||
const events = recording.replay();
|
||||
|
||||
assert.equal(events[0].level.toString(), 'DEBUG');
|
||||
assert.equal(events[0].data[0], 'Debug event');
|
||||
assert.type(events[0].startTime, 'Date');
|
||||
assert.equal(events[0].level.toString(), "DEBUG");
|
||||
assert.equal(events[0].data[0], "Debug event");
|
||||
assert.type(events[0].startTime, "Date");
|
||||
|
||||
assert.equal(events.length, 4, 'should not emit events of a lower level');
|
||||
assert.equal(events[1].level.toString(), 'WARN');
|
||||
assert.equal(events.length, 4, "should not emit events of a lower level");
|
||||
assert.equal(events[1].level.toString(), "WARN");
|
||||
|
||||
assert.type(events[2].data[1], 'Error', 'should include the error if passed in');
|
||||
assert.equal(events[2].data[1].message, 'Pants are on fire!');
|
||||
assert.type(
|
||||
events[2].data[1],
|
||||
"Error",
|
||||
"should include the error if passed in"
|
||||
);
|
||||
assert.equal(events[2].data[1].message, "Pants are on fire!");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('when shutdown is called', (t) => {
|
||||
batch.test("when shutdown is called", t => {
|
||||
const events = {
|
||||
appenderShutdownCalled: false
|
||||
shutdownCalled: []
|
||||
};
|
||||
|
||||
const log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/file': {
|
||||
name: 'file',
|
||||
configure: function () {
|
||||
function thing() {
|
||||
return null;
|
||||
}
|
||||
|
||||
thing.shutdown = function (cb) {
|
||||
events.appenderShutdownCalled = true;
|
||||
cb();
|
||||
};
|
||||
return thing;
|
||||
const log4js = sandbox.require("../../lib/log4js", {
|
||||
requires: {
|
||||
"./appenders/file": {
|
||||
name: "file",
|
||||
configure() {
|
||||
function thing(evt) {
|
||||
events.event = evt;
|
||||
return null;
|
||||
}
|
||||
|
||||
thing.shutdown = function(cb) {
|
||||
events.shutdownCalled.push(true);
|
||||
cb();
|
||||
};
|
||||
return thing;
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
const config = {
|
||||
appenders: {
|
||||
file: {
|
||||
type: 'file',
|
||||
filename: 'cheesy-wotsits.log',
|
||||
type: "file",
|
||||
filename: "cheesy-wotsits.log",
|
||||
maxLogSize: 1024,
|
||||
backups: 3
|
||||
},
|
||||
alsoFile: {
|
||||
type: "file"
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['file'], level: 'DEBUG' } }
|
||||
categories: {
|
||||
default: { appenders: ["file", "alsoFile"], level: "DEBUG" }
|
||||
}
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
const logger = log4js.getLogger();
|
||||
log4js.shutdown(() => {
|
||||
t.ok(events.appenderShutdownCalled, 'should invoke appender shutdowns');
|
||||
t.equal(
|
||||
events.shutdownCalled.length,
|
||||
2,
|
||||
"should invoke appender shutdowns"
|
||||
);
|
||||
logger.info("this should not go to the appenders");
|
||||
t.notOk(events.event);
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
|
||||
batch.test('configuration when passed as filename', (t) => {
batch.test("configuration when passed as filename", t => {
let appenderConfig;
let configFilename;

const log4js = sandbox.require(
'../../lib/log4js',
{
ignoreMissing: true,
requires: {
fs: {
statSync: function () {
return { mtime: Date.now() };
},
readFileSync: function (filename) {
configFilename = filename;
return JSON.stringify({
appenders: {
file: {
type: 'file',
filename: 'whatever.log'
}
},
categories: { default: { appenders: ['file'], level: 'DEBUG' } }
});
},
readdirSync: function () {
return ['file'];
}
const log4js = sandbox.require("../../lib/log4js", {
ignoreMissing: true,
requires: {
fs: {
statSync() {
return { mtime: Date.now() };
},
'./file': {
configure: function (configuration) {
appenderConfig = configuration;
return function () {
};
}
readFileSync(filename) {
configFilename = filename;
return JSON.stringify({
appenders: {
file: {
type: "file",
filename: "whatever.log"
}
},
categories: { default: { appenders: ["file"], level: "DEBUG" } }
});
},
readdirSync() {
return ["file"];
}
},
"./file": {
configure(configuration) {
appenderConfig = configuration;
return function() {};
}
}
}
);
});

log4js.configure('/path/to/cheese.json');
t.equal(configFilename, '/path/to/cheese.json', 'should read the config from a file');
t.equal(appenderConfig.filename, 'whatever.log', 'should pass config to appender');
log4js.configure("/path/to/cheese.json");
t.equal(
configFilename,
"/path/to/cheese.json",
"should read the config from a file"
);
t.equal(
appenderConfig.filename,
"whatever.log",
"should pass config to appender"
);
t.end();
});

batch.test('with configure not called', (t) => {
batch.test("with configure not called", t => {
const fakeStdoutAppender = {
configure: function () {
configure() {
this.required = true;
return function (evt) {
return function(evt) {
fakeStdoutAppender.evt = evt;
};
}
};

const log4js = sandbox.require(
'../../lib/log4js',
{
requires: {
'./appenders/stdout': fakeStdoutAppender
}
const log4js = sandbox.require("../../lib/log4js", {
requires: {
"./appenders/stdout": fakeStdoutAppender
}
);
});

const logger = log4js.getLogger('some-logger');
logger.debug('This is a test');
t.ok(fakeStdoutAppender.required, 'stdout should be required');
t.notOk(fakeStdoutAppender.evt, 'should not log anything');
const logger = log4js.getLogger("some-logger");
logger.debug("This is a test");
t.ok(fakeStdoutAppender.required, "stdout should be required");
t.notOk(fakeStdoutAppender.evt, "should not log anything");
t.end();
});

batch.test('configuration persistence', (t) => {
const firstLog4js = require('../../lib/log4js');
batch.test("with configure called with empty values", t => {
[null, undefined, "", " ", []].forEach(config => {
const log4js = require("../../lib/log4js");
const expectedError = `Problem reading config from file "${util.inspect(
config
)}". Error was ENOENT: no such file or directory`;
t.throws(() => log4js.configure(config), expectedError);
});

t.end();
});

batch.test("configuration persistence", t => {
const firstLog4js = require("../../lib/log4js");
firstLog4js.configure({
appenders: { recorder: { type: 'recording' } },
categories: { default: { appenders: ['recorder'], level: 'DEBUG' } }
appenders: { recorder: { type: "recording" } },
categories: { default: { appenders: ["recorder"], level: "DEBUG" } }
});
recording.reset();

const secondLog4js = require('../../lib/log4js');
secondLog4js.getLogger().info('This should go to the appender defined in firstLog4js');
const secondLog4js = require("../../lib/log4js");
secondLog4js
.getLogger()
.info("This should go to the appender defined in firstLog4js");

t.equal(recording.replay()[0].data[0], 'This should go to the appender defined in firstLog4js');
t.equal(
recording.replay()[0].data[0],
"This should go to the appender defined in firstLog4js"
);
t.end();
});


@ -1,59 +1,69 @@
'use strict';
const process = require("process");
const { test } = require("tap");
const debug = require("debug");
const fs = require("fs");
const log4js = require("../../lib/log4js");

const process = require('process');
const test = require('tap').test;
const debug = require('debug');
const fs = require('fs');
const log4js = require('../../lib/log4js');
test("multiFile appender", batch => {
|
||||
batch.test(
|
||||
"should write to multiple files based on the loggingEvent property",
|
||||
t => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
multi: {
|
||||
type: "multiFile",
|
||||
base: "logs/",
|
||||
property: "categoryName",
|
||||
extension: ".log"
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ["multi"], level: "info" } }
|
||||
});
|
||||
const loggerA = log4js.getLogger("A");
|
||||
const loggerB = log4js.getLogger("B");
|
||||
loggerA.info("I am in logger A");
|
||||
loggerB.info("I am in logger B");
|
||||
log4js.shutdown(() => {
|
||||
t.contains(fs.readFileSync("logs/A.log", "utf-8"), "I am in logger A");
|
||||
t.contains(fs.readFileSync("logs/B.log", "utf-8"), "I am in logger B");
|
||||
t.end();
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
test('multiFile appender', (batch) => {
|
||||
batch.test('should write to multiple files based on the loggingEvent property', (t) => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
multi: {
|
||||
type: 'multiFile', base: 'logs/', property: 'categoryName', extension: '.log'
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['multi'], level: 'info' } }
|
||||
});
|
||||
const loggerA = log4js.getLogger('A');
|
||||
const loggerB = log4js.getLogger('B');
|
||||
loggerA.info('I am in logger A');
|
||||
loggerB.info('I am in logger B');
|
||||
log4js.shutdown(() => {
|
||||
t.contains(fs.readFileSync('logs/A.log', 'utf-8'), 'I am in logger A');
|
||||
t.contains(fs.readFileSync('logs/B.log', 'utf-8'), 'I am in logger B');
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
batch.test(
|
||||
"should write to multiple files based on loggingEvent.context properties",
|
||||
t => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
multi: {
|
||||
type: "multiFile",
|
||||
base: "logs/",
|
||||
property: "label",
|
||||
extension: ".log"
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ["multi"], level: "info" } }
|
||||
});
|
||||
const loggerC = log4js.getLogger("cheese");
|
||||
const loggerD = log4js.getLogger("biscuits");
|
||||
loggerC.addContext("label", "C");
|
||||
loggerD.addContext("label", "D");
|
||||
loggerC.info("I am in logger C");
|
||||
loggerD.info("I am in logger D");
|
||||
log4js.shutdown(() => {
|
||||
t.contains(fs.readFileSync("logs/C.log", "utf-8"), "I am in logger C");
|
||||
t.contains(fs.readFileSync("logs/D.log", "utf-8"), "I am in logger D");
|
||||
t.end();
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
batch.test('should write to multiple files based on loggingEvent.context properties', (t) => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
multi: {
|
||||
type: 'multiFile', base: 'logs/', property: 'label', extension: '.log'
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['multi'], level: 'info' } }
|
||||
});
|
||||
const loggerC = log4js.getLogger('cheese');
|
||||
const loggerD = log4js.getLogger('biscuits');
|
||||
loggerC.addContext('label', 'C');
|
||||
loggerD.addContext('label', 'D');
|
||||
loggerC.info('I am in logger C');
|
||||
loggerD.info('I am in logger D');
|
||||
log4js.shutdown(() => {
|
||||
t.contains(fs.readFileSync('logs/C.log', 'utf-8'), 'I am in logger C');
|
||||
t.contains(fs.readFileSync('logs/D.log', 'utf-8'), 'I am in logger D');
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
|
||||
batch.test('should close file after timeout', (t) => {
|
||||
batch.test("should close file after timeout", t => {
|
||||
/* checking that the file is closed after a timeout is done by looking at the debug logs
|
||||
since detecting file locks with node.js is platform specific.
|
||||
*/
|
||||
const debugWasEnabled = debug.enabled('log4js:multiFile');
|
||||
const debugWasEnabled = debug.enabled("log4js:multiFile");
|
||||
const debugLogs = [];
|
||||
const originalWrite = process.stderr.write;
|
||||
process.stderr.write = (string, encoding, fd) => {
|
||||
@ -62,121 +72,140 @@ test('multiFile appender', (batch) => {
|
||||
originalWrite.apply(process.stderr, [string, encoding, fd]);
|
||||
}
|
||||
};
|
||||
debug.enable('log4js:multiFile');
|
||||
debug.enable("log4js:multiFile");
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
multi: {
|
||||
type: 'multiFile', base: 'logs/', property: 'label', extension: '.log', timeout: 20
|
||||
type: "multiFile",
|
||||
base: "logs/",
|
||||
property: "label",
|
||||
extension: ".log",
|
||||
timeout: 20
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['multi'], level: 'info' } }
|
||||
categories: { default: { appenders: ["multi"], level: "info" } }
|
||||
});
|
||||
const loggerC = log4js.getLogger('cheese');
|
||||
loggerC.addContext('label', 'C');
|
||||
loggerC.info('I am in logger C');
|
||||
const loggerC = log4js.getLogger("cheese");
|
||||
loggerC.addContext("label", "C");
|
||||
loggerC.info("I am in logger C");
|
||||
setTimeout(() => {
|
||||
t.contains(debugLogs[debugLogs.length - 1], 'C not used for > 20 ms => close');
|
||||
t.contains(
|
||||
debugLogs[debugLogs.length - 1],
|
||||
"C not used for > 20 ms => close"
|
||||
);
|
||||
if (!debugWasEnabled) {
|
||||
debug.disable('log4js:multiFile');
|
||||
debug.disable("log4js:multiFile");
|
||||
}
|
||||
process.stderr.write = originalWrite;
|
||||
t.end();
|
||||
}, 50);
|
||||
});
|
||||
|
||||
batch.test('should fail silently if loggingEvent property has no value', (t) => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
multi: {
|
||||
type: 'multiFile', base: 'logs/', property: 'label', extension: '.log'
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['multi'], level: 'info' } }
|
||||
});
|
||||
const loggerE = log4js.getLogger();
|
||||
loggerE.addContext('label', 'E');
|
||||
loggerE.info('I am in logger E');
|
||||
loggerE.removeContext('label');
|
||||
loggerE.info('I am not in logger E');
|
||||
loggerE.addContext('label', null);
|
||||
loggerE.info('I am also not in logger E');
|
||||
log4js.shutdown(() => {
|
||||
const contents = fs.readFileSync('logs/E.log', 'utf-8');
|
||||
t.contains(contents, 'I am in logger E');
|
||||
t.notMatch(contents, 'I am not in logger E');
|
||||
t.notMatch(contents, 'I am also not in logger E');
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
batch.test(
|
||||
"should fail silently if loggingEvent property has no value",
|
||||
t => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
multi: {
|
||||
type: "multiFile",
|
||||
base: "logs/",
|
||||
property: "label",
|
||||
extension: ".log"
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ["multi"], level: "info" } }
|
||||
});
|
||||
const loggerE = log4js.getLogger();
|
||||
loggerE.addContext("label", "E");
|
||||
loggerE.info("I am in logger E");
|
||||
loggerE.removeContext("label");
|
||||
loggerE.info("I am not in logger E");
|
||||
loggerE.addContext("label", null);
|
||||
loggerE.info("I am also not in logger E");
|
||||
log4js.shutdown(() => {
|
||||
const contents = fs.readFileSync("logs/E.log", "utf-8");
|
||||
t.contains(contents, "I am in logger E");
|
||||
t.notMatch(contents, "I am not in logger E");
|
||||
t.notMatch(contents, "I am also not in logger E");
|
||||
t.end();
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
batch.test('should pass options to rolling file stream', (t) => {
|
||||
batch.test("should pass options to rolling file stream", t => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
multi: {
|
||||
type: 'multiFile',
|
||||
base: 'logs/',
|
||||
property: 'label',
|
||||
extension: '.log',
|
||||
type: "multiFile",
|
||||
base: "logs/",
|
||||
property: "label",
|
||||
extension: ".log",
|
||||
maxLogSize: 61,
|
||||
backups: 2
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['multi'], level: 'info' } }
|
||||
categories: { default: { appenders: ["multi"], level: "info" } }
|
||||
});
|
||||
const loggerF = log4js.getLogger();
|
||||
loggerF.addContext('label', 'F');
|
||||
loggerF.info('Being in logger F is the best');
|
||||
loggerF.info('I am also in logger F');
|
||||
loggerF.info('I am in logger F');
|
||||
loggerF.addContext("label", "F");
|
||||
loggerF.info("Being in logger F is the best");
|
||||
loggerF.info("I am also in logger F");
|
||||
loggerF.info("I am in logger F");
|
||||
log4js.shutdown(() => {
|
||||
let contents = fs.readFileSync('logs/F.log', 'utf-8');
|
||||
t.contains(contents, 'I am in logger F');
|
||||
contents = fs.readFileSync('logs/F.log.1', 'utf-8');
|
||||
t.contains(contents, 'I am also in logger F');
|
||||
contents = fs.readFileSync('logs/F.log.2', 'utf-8');
|
||||
t.contains(contents, 'Being in logger F is the best');
|
||||
let contents = fs.readFileSync("logs/F.log", "utf-8");
|
||||
t.contains(contents, "I am in logger F");
|
||||
contents = fs.readFileSync("logs/F.log.1", "utf-8");
|
||||
t.contains(contents, "I am also in logger F");
|
||||
contents = fs.readFileSync("logs/F.log.2", "utf-8");
|
||||
t.contains(contents, "Being in logger F is the best");
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
|
||||
batch.test('should inherit config from category hierarchy', (t) => {
|
||||
batch.test("should inherit config from category hierarchy", t => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
out: { type: 'stdout' },
|
||||
out: { type: "stdout" },
|
||||
test: {
|
||||
type: 'multiFile', base: 'logs/', property: 'categoryName', extension: '.log'
|
||||
type: "multiFile",
|
||||
base: "logs/",
|
||||
property: "categoryName",
|
||||
extension: ".log"
|
||||
}
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['out'], level: 'info' },
|
||||
test: { appenders: ['test'], level: 'debug' }
|
||||
default: { appenders: ["out"], level: "info" },
|
||||
test: { appenders: ["test"], level: "debug" }
|
||||
}
|
||||
});
|
||||
|
||||
const testLogger = log4js.getLogger('test.someTest');
|
||||
testLogger.debug('This should go to the file');
|
||||
const testLogger = log4js.getLogger("test.someTest");
|
||||
testLogger.debug("This should go to the file");
|
||||
log4js.shutdown(() => {
|
||||
const contents = fs.readFileSync('logs/test.someTest.log', 'utf-8');
|
||||
t.contains(contents, 'This should go to the file');
|
||||
const contents = fs.readFileSync("logs/test.someTest.log", "utf-8");
|
||||
t.contains(contents, "This should go to the file");
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
|
||||
batch.test('should shutdown safely even if it is not used', (t) => {
|
||||
batch.test("should shutdown safely even if it is not used", t => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
out: { type: 'stdout' },
|
||||
out: { type: "stdout" },
|
||||
test: {
|
||||
type: 'multiFile', base: 'logs/', property: 'categoryName', extension: '.log'
|
||||
type: "multiFile",
|
||||
base: "logs/",
|
||||
property: "categoryName",
|
||||
extension: ".log"
|
||||
}
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['out'], level: 'info' },
|
||||
test: { appenders: ['test'], level: 'debug' }
|
||||
default: { appenders: ["out"], level: "info" },
|
||||
test: { appenders: ["test"], level: "debug" }
|
||||
}
|
||||
});
|
||||
log4js.shutdown(() => {
|
||||
t.ok('callback is called');
|
||||
t.ok("callback is called");
|
||||
t.end();
|
||||
});
|
||||
});

@ -1,23 +1,21 @@
'use strict';
const { test } = require("tap");
const net = require("net");
const childProcess = require("child_process");
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");

const test = require('tap').test;
const net = require('net');
const childProcess = require('child_process');
const sandbox = require('@log4js-node/sandboxed-module');
const log4js = require('../../lib/log4js');

test('multiprocess appender shutdown (master)', { timeout: 2000 }, (t) => {
test("multiprocess appender shutdown (master)", { timeout: 2000 }, t => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
stdout: { type: 'stdout' },
|
||||
stdout: { type: "stdout" },
|
||||
multi: {
|
||||
type: 'multiprocess',
|
||||
mode: 'master',
|
||||
type: "multiprocess",
|
||||
mode: "master",
|
||||
loggerPort: 12345,
|
||||
appender: 'stdout'
|
||||
appender: "stdout"
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['multi'], level: 'debug' } }
|
||||
categories: { default: { appenders: ["multi"], level: "debug" } }
|
||||
});
|
||||
|
||||
setTimeout(() => {
|
||||
@ -25,11 +23,11 @@ test('multiprocess appender shutdown (master)', { timeout: 2000 }, (t) => {
|
||||
setTimeout(() => {
|
||||
net
|
||||
.connect({ port: 12345 }, () => {
|
||||
t.fail('connection should not still work');
|
||||
t.fail("connection should not still work");
|
||||
t.end();
|
||||
})
|
||||
.on('error', (err) => {
|
||||
t.ok(err, 'we got a connection error');
|
||||
.on("error", err => {
|
||||
t.ok(err, "we got a connection error");
|
||||
t.end();
|
||||
});
|
||||
}, 250);
|
||||
@ -37,38 +35,42 @@ test('multiprocess appender shutdown (master)', { timeout: 2000 }, (t) => {
|
||||
}, 250);
|
||||
});
|
||||
|
||||
test('multiprocess appender shutdown (worker)', (t) => {
|
||||
test("multiprocess appender shutdown (worker)", t => {
|
||||
const fakeConnection = {
|
||||
evts: {},
|
||||
msgs: [],
|
||||
on: function (evt, cb) {
|
||||
on(evt, cb) {
|
||||
this.evts[evt] = cb;
|
||||
},
|
||||
write: function (data) {
|
||||
write(data) {
|
||||
this.msgs.push(data);
|
||||
},
|
||||
removeAllListeners: function () {
|
||||
removeAllListeners() {
|
||||
this.removeAllListenersCalled = true;
|
||||
},
|
||||
end: function (cb) {
|
||||
end(cb) {
|
||||
this.endCb = cb;
|
||||
}
|
||||
};
|
||||
const logLib = sandbox.require('../../lib/log4js', {
|
||||
const logLib = sandbox.require("../../lib/log4js", {
|
||||
requires: {
|
||||
net: {
|
||||
createConnection: function () {
|
||||
createConnection() {
|
||||
return fakeConnection;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
logLib.configure({
|
||||
appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
|
||||
categories: { default: { appenders: ['worker'], level: 'debug' } }
|
||||
appenders: { worker: { type: "multiprocess", mode: "worker" } },
|
||||
categories: { default: { appenders: ["worker"], level: "debug" } }
|
||||
});
|
||||
|
||||
logLib.getLogger().info('Putting something in the buffer before the connection is established');
|
||||
logLib
|
||||
.getLogger()
|
||||
.info(
|
||||
"Putting something in the buffer before the connection is established"
|
||||
);
|
||||
// nothing been written yet.
|
||||
t.equal(fakeConnection.msgs.length, 0);
|
||||
|
||||
@ -92,32 +94,32 @@ test('multiprocess appender shutdown (worker)', (t) => {
|
||||
}, 500);
|
||||
});
|
||||
|
||||
test('multiprocess appender crash (worker)', (t) => {
|
||||
test("multiprocess appender crash (worker)", t => {
|
||||
const loggerPort = 12346;
|
||||
const vcr = require('../../lib/appenders/recording');
|
||||
const vcr = require("../../lib/appenders/recording");
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
console: { type: 'recording' },
|
||||
console: { type: "recording" },
|
||||
multi: {
|
||||
type: 'multiprocess',
|
||||
mode: 'master',
|
||||
loggerPort: loggerPort,
|
||||
appender: 'console'
|
||||
type: "multiprocess",
|
||||
mode: "master",
|
||||
loggerPort,
|
||||
appender: "console"
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['multi'], level: 'debug' } }
|
||||
categories: { default: { appenders: ["multi"], level: "debug" } }
|
||||
});
|
||||
|
||||
const worker = childProcess.fork(require.resolve('../multiprocess-worker'), [
|
||||
'start-multiprocess-worker',
|
||||
const worker = childProcess.fork(require.resolve("../multiprocess-worker"), [
|
||||
"start-multiprocess-worker",
|
||||
loggerPort
|
||||
]);
|
||||
|
||||
worker.on('message', (m) => {
|
||||
if (m === 'worker is done') {
|
||||
worker.on("message", m => {
|
||||
if (m === "worker is done") {
|
||||
setTimeout(() => {
|
||||
worker.kill();
|
||||
t.equal(vcr.replay()[0].data[0], 'Logging from worker');
|
||||
t.equal(vcr.replay()[0].data[0], "Logging from worker");
|
||||
log4js.shutdown(() => t.end());
|
||||
}, 100);
|
||||
}
|
||||
|
||||
@ -1,48 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
const test = require('tap').test;
|
||||
const flatted = require('flatted');
|
||||
const sandbox = require('@log4js-node/sandboxed-module');
|
||||
const recording = require('../../lib/appenders/recording');
|
||||
const { test } = require("tap");
|
||||
const flatted = require("flatted");
|
||||
const sandbox = require("@log4js-node/sandboxed-module");
|
||||
const recording = require("../../lib/appenders/recording");
|
||||
|
||||
function makeFakeNet() {
|
||||
return {
|
||||
data: [],
|
||||
cbs: {},
|
||||
createConnectionCalled: 0,
|
||||
createConnection: function (port, host) {
|
||||
createConnection(port, host) {
|
||||
const fakeNet = this;
|
||||
this.port = port;
|
||||
this.host = host;
|
||||
this.createConnectionCalled += 1;
|
||||
return {
|
||||
on: function (evt, cb) {
|
||||
on(evt, cb) {
|
||||
fakeNet.cbs[evt] = cb;
|
||||
},
|
||||
write: function (data, encoding) {
|
||||
write(data, encoding) {
|
||||
fakeNet.data.push(data);
|
||||
fakeNet.encoding = encoding;
|
||||
},
|
||||
end: function () {
|
||||
end() {
|
||||
fakeNet.closeCalled = true;
|
||||
}
|
||||
};
|
||||
},
|
||||
createServer: function (cb) {
|
||||
createServer(cb) {
|
||||
const fakeNet = this;
|
||||
cb({
|
||||
remoteAddress: '1.2.3.4',
|
||||
remotePort: '1234',
|
||||
setEncoding: function (encoding) {
|
||||
remoteAddress: "1.2.3.4",
|
||||
remotePort: "1234",
|
||||
setEncoding(encoding) {
|
||||
fakeNet.encoding = encoding;
|
||||
},
|
||||
on: function (event, cb2) {
|
||||
on(event, cb2) {
|
||||
fakeNet.cbs[event] = cb2;
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
listen: function (port, host) {
|
||||
listen(port, host) {
|
||||
fakeNet.port = port;
|
||||
fakeNet.host = host;
|
||||
}
|
||||
@ -51,73 +49,79 @@ function makeFakeNet() {
|
||||
};
|
||||
}
|
||||
|
||||
test('Multiprocess Appender', (batch) => {
|
||||
batch.beforeEach((done) => {
|
||||
test("Multiprocess Appender", batch => {
|
||||
batch.beforeEach(done => {
|
||||
recording.erase();
|
||||
done();
|
||||
});
|
||||
|
||||
batch.test('worker', (t) => {
|
||||
batch.test("worker", t => {
|
||||
const fakeNet = makeFakeNet();
|
||||
|
||||
const log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
net: fakeNet
|
||||
}
|
||||
const log4js = sandbox.require("../../lib/log4js", {
|
||||
requires: {
|
||||
net: fakeNet
|
||||
}
|
||||
);
|
||||
});
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
worker: {
|
||||
type: 'multiprocess', mode: 'worker', loggerPort: 1234, loggerHost: 'pants'
|
||||
type: "multiprocess",
|
||||
mode: "worker",
|
||||
loggerPort: 1234,
|
||||
loggerHost: "pants"
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['worker'], level: 'trace' } }
|
||||
categories: { default: { appenders: ["worker"], level: "trace" } }
|
||||
});
|
||||
|
||||
const logger = log4js.getLogger();
|
||||
logger.info('before connect');
|
||||
logger.info("before connect");
|
||||
fakeNet.cbs.connect();
|
||||
logger.info('after connect');
|
||||
logger.info("after connect");
|
||||
fakeNet.cbs.close(true);
|
||||
logger.info('after error, before connect');
|
||||
logger.info("after error, before connect");
|
||||
fakeNet.cbs.connect();
|
||||
logger.info('after error, after connect');
|
||||
logger.error(new Error('Error test'));
|
||||
logger.info("after error, after connect");
|
||||
logger.error(new Error("Error test"));
|
||||
|
||||
const net = fakeNet;
|
||||
t.test('should open a socket to the loggerPort and loggerHost', (assert) => {
|
||||
t.test("should open a socket to the loggerPort and loggerHost", assert => {
|
||||
assert.equal(net.port, 1234);
|
||||
assert.equal(net.host, 'pants');
|
||||
assert.equal(net.host, "pants");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('should buffer messages written before socket is connected', (assert) => {
|
||||
assert.include(net.data[0], 'before connect');
|
||||
assert.end();
|
||||
});
|
||||
t.test(
|
||||
"should buffer messages written before socket is connected",
|
||||
assert => {
|
||||
assert.include(net.data[0], "before connect");
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
|
||||
t.test('should write log messages to socket as flatted strings with a terminator string', (assert) => {
|
||||
assert.include(net.data[0], 'before connect');
|
||||
assert.equal(net.data[1], '__LOG4JS__');
|
||||
assert.include(net.data[2], 'after connect');
|
||||
assert.equal(net.data[3], '__LOG4JS__');
|
||||
assert.equal(net.encoding, 'utf8');
|
||||
assert.end();
|
||||
});
|
||||
t.test(
|
||||
"should write log messages to socket as flatted strings with a terminator string",
|
||||
assert => {
|
||||
assert.include(net.data[0], "before connect");
|
||||
assert.equal(net.data[1], "__LOG4JS__");
|
||||
assert.include(net.data[2], "after connect");
|
||||
assert.equal(net.data[3], "__LOG4JS__");
|
||||
assert.equal(net.encoding, "utf8");
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
|
||||
t.test('should attempt to re-open the socket on error', (assert) => {
|
||||
assert.include(net.data[4], 'after error, before connect');
|
||||
assert.equal(net.data[5], '__LOG4JS__');
|
||||
assert.include(net.data[6], 'after error, after connect');
|
||||
assert.equal(net.data[7], '__LOG4JS__');
|
||||
t.test("should attempt to re-open the socket on error", assert => {
|
||||
assert.include(net.data[4], "after error, before connect");
|
||||
assert.equal(net.data[5], "__LOG4JS__");
|
||||
assert.include(net.data[6], "after error, after connect");
|
||||
assert.equal(net.data[7], "__LOG4JS__");
|
||||
assert.equal(net.createConnectionCalled, 2);
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('should serialize an Error correctly', (assert) => {
|
||||
t.test("should serialize an Error correctly", assert => {
|
||||
assert.ok(
|
||||
flatted.parse(net.data[8]).data[0].stack,
|
||||
`Expected:\n\n${net.data[8]}\n\n to have a 'data[0].stack' property`
|
||||
@ -130,236 +134,237 @@ test('Multiprocess Appender', (batch) => {
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('worker with timeout', (t) => {
|
||||
batch.test("worker with timeout", t => {
|
||||
const fakeNet = makeFakeNet();
|
||||
|
||||
const log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
net: fakeNet
|
||||
}
|
||||
const log4js = sandbox.require("../../lib/log4js", {
|
||||
requires: {
|
||||
net: fakeNet
|
||||
}
|
||||
);
|
||||
});
|
||||
log4js.configure({
|
||||
appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
|
||||
categories: { default: { appenders: ['worker'], level: 'trace' } }
|
||||
appenders: { worker: { type: "multiprocess", mode: "worker" } },
|
||||
categories: { default: { appenders: ["worker"], level: "trace" } }
|
||||
});
|
||||
|
||||
const logger = log4js.getLogger();
|
||||
logger.info('before connect');
|
||||
logger.info("before connect");
|
||||
fakeNet.cbs.connect();
|
||||
logger.info('after connect');
|
||||
logger.info("after connect");
|
||||
fakeNet.cbs.timeout();
|
||||
logger.info('after timeout, before close');
|
||||
logger.info("after timeout, before close");
|
||||
fakeNet.cbs.close();
|
||||
logger.info('after close, before connect');
|
||||
logger.info("after close, before connect");
|
||||
fakeNet.cbs.connect();
|
||||
logger.info('after close, after connect');
|
||||
logger.info("after close, after connect");
|
||||
|
||||
const net = fakeNet;
|
||||
|
||||
t.test('should attempt to re-open the socket', (assert) => {
|
||||
t.test("should attempt to re-open the socket", assert => {
|
||||
// skipping the __LOG4JS__ separators
|
||||
assert.include(net.data[0], 'before connect');
|
||||
assert.include(net.data[2], 'after connect');
|
||||
assert.include(net.data[4], 'after timeout, before close');
|
||||
assert.include(net.data[6], 'after close, before connect');
|
||||
assert.include(net.data[8], 'after close, after connect');
|
||||
assert.include(net.data[0], "before connect");
|
||||
assert.include(net.data[2], "after connect");
|
||||
assert.include(net.data[4], "after timeout, before close");
|
||||
assert.include(net.data[6], "after close, before connect");
|
||||
assert.include(net.data[8], "after close, after connect");
|
||||
assert.equal(net.createConnectionCalled, 2);
|
||||
assert.end();
|
||||
});
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('worker defaults', (t) => {
|
||||
batch.test("worker defaults", t => {
|
||||
const fakeNet = makeFakeNet();
|
||||
|
||||
const log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
net: fakeNet
|
||||
}
|
||||
const log4js = sandbox.require("../../lib/log4js", {
|
||||
requires: {
|
||||
net: fakeNet
|
||||
}
|
||||
);
|
||||
});
|
||||
log4js.configure({
|
||||
appenders: { worker: { type: 'multiprocess', mode: 'worker' } },
|
||||
categories: { default: { appenders: ['worker'], level: 'trace' } }
|
||||
appenders: { worker: { type: "multiprocess", mode: "worker" } },
|
||||
categories: { default: { appenders: ["worker"], level: "trace" } }
|
||||
});
|
||||
|
||||
t.test('should open a socket to localhost:5000', (assert) => {
|
||||
t.test("should open a socket to localhost:5000", assert => {
|
||||
assert.equal(fakeNet.port, 5000);
|
||||
assert.equal(fakeNet.host, 'localhost');
|
||||
assert.equal(fakeNet.host, "localhost");
|
||||
assert.end();
|
||||
});
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('master', (t) => {
|
||||
batch.test("master", t => {
|
||||
const fakeNet = makeFakeNet();
|
||||
|
||||
const log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
net: fakeNet,
|
||||
'./appenders/recording': recording
|
||||
}
|
||||
const log4js = sandbox.require("../../lib/log4js", {
|
||||
requires: {
|
||||
net: fakeNet,
|
||||
"./appenders/recording": recording
|
||||
}
|
||||
);
|
||||
});
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
recorder: { type: 'recording' },
|
||||
recorder: { type: "recording" },
|
||||
master: {
|
||||
type: 'multiprocess',
|
||||
mode: 'master',
|
||||
type: "multiprocess",
|
||||
mode: "master",
|
||||
loggerPort: 1234,
|
||||
loggerHost: 'server',
|
||||
appender: 'recorder'
|
||||
loggerHost: "server",
|
||||
appender: "recorder"
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['master'], level: 'trace' } }
|
||||
categories: { default: { appenders: ["master"], level: "trace" } }
|
||||
});
|
||||
|
||||
const net = fakeNet;
|
||||
|
||||
t.test('should listen for log messages on loggerPort and loggerHost', (assert) => {
|
||||
assert.equal(net.port, 1234);
|
||||
assert.equal(net.host, 'server');
|
||||
t.test(
|
||||
"should listen for log messages on loggerPort and loggerHost",
|
||||
assert => {
|
||||
assert.equal(net.port, 1234);
|
||||
assert.equal(net.host, "server");
|
||||
assert.end();
|
||||
}
|
||||
);
|
||||
|
||||
t.test("should return the underlying appender", assert => {
|
||||
log4js
|
||||
.getLogger()
|
||||
.info("this should be sent to the actual appender directly");
|
||||
|
||||
assert.equal(
|
||||
recording.replay()[0].data[0],
|
||||
"this should be sent to the actual appender directly"
|
||||
);
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('should return the underlying appender', (assert) => {
|
||||
log4js.getLogger().info('this should be sent to the actual appender directly');
|
||||
|
||||
assert.equal(recording.replay()[0].data[0], 'this should be sent to the actual appender directly');
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.test('should log the error on "error" event', (assert) => {
|
||||
net.cbs.error(new Error('Expected error'));
|
||||
t.test('should log the error on "error" event', assert => {
|
||||
net.cbs.error(new Error("Expected error"));
|
||||
const logEvents = recording.replay();
|
||||
assert.plan(2);
|
||||
assert.equal(logEvents.length, 1);
|
||||
assert.equal('A worker log process hung up unexpectedly', logEvents[0].data[0]);
|
||||
assert.equal(
|
||||
"A worker log process hung up unexpectedly",
|
||||
logEvents[0].data[0]
|
||||
);
|
||||
});
|
||||
|
||||
t.test('when a client connects', (assert) => {
|
||||
t.test("when a client connects", assert => {
|
||||
const logString = `${flatted.stringify({
|
||||
level: { level: 10000, levelStr: 'DEBUG' },
|
||||
data: ['some debug']
|
||||
level: { level: 10000, levelStr: "DEBUG" },
|
||||
data: ["some debug"]
|
||||
})}__LOG4JS__`;
|
||||
|
||||
net.cbs.data(`${flatted.stringify({
|
||||
level: { level: 40000, levelStr: 'ERROR' },
|
||||
data: ['an error message']
|
||||
})}__LOG4JS__`);
|
||||
net.cbs.data(
|
||||
`${flatted.stringify({
|
||||
level: { level: 40000, levelStr: "ERROR" },
|
||||
data: ["an error message"]
|
||||
})}__LOG4JS__`
|
||||
);
|
||||
net.cbs.data(logString.substring(0, 10));
|
||||
net.cbs.data(logString.substring(10));
|
||||
net.cbs.data(logString + logString + logString);
|
||||
net.cbs.end(`${flatted.stringify({
|
||||
level: { level: 50000, levelStr: 'FATAL' },
|
||||
data: ["that's all folks"]
|
||||
})}__LOG4JS__`);
|
||||
net.cbs.data('bad message__LOG4JS__');
|
||||
net.cbs.end(
|
||||
`${flatted.stringify({
|
||||
level: { level: 50000, levelStr: "FATAL" },
|
||||
data: ["that's all folks"]
|
||||
})}__LOG4JS__`
|
||||
);
|
||||
net.cbs.data("bad message__LOG4JS__");
|
||||
|
||||
const logEvents = recording.replay();
|
||||
// should parse log messages into log events and send to appender
|
||||
assert.equal(logEvents[0].level.toString(), 'ERROR');
|
||||
assert.equal(logEvents[0].data[0], 'an error message');
|
||||
assert.equal(logEvents[0].remoteAddress, '1.2.3.4');
|
||||
assert.equal(logEvents[0].remotePort, '1234');
|
||||
assert.equal(logEvents[0].level.toString(), "ERROR");
|
||||
assert.equal(logEvents[0].data[0], "an error message");
|
||||
assert.equal(logEvents[0].remoteAddress, "1.2.3.4");
|
||||
assert.equal(logEvents[0].remotePort, "1234");
|
||||
|
||||
// should parse log messages split into multiple chunks'
|
||||
assert.equal(logEvents[1].level.toString(), 'DEBUG');
|
||||
assert.equal(logEvents[1].data[0], 'some debug');
|
||||
assert.equal(logEvents[1].remoteAddress, '1.2.3.4');
|
||||
assert.equal(logEvents[1].remotePort, '1234');
|
||||
assert.equal(logEvents[1].level.toString(), "DEBUG");
|
||||
assert.equal(logEvents[1].data[0], "some debug");
|
||||
assert.equal(logEvents[1].remoteAddress, "1.2.3.4");
|
||||
assert.equal(logEvents[1].remotePort, "1234");
|
||||
|
||||
// should parse multiple log messages in a single chunk'
|
||||
assert.equal(logEvents[2].data[0], 'some debug');
|
||||
assert.equal(logEvents[3].data[0], 'some debug');
|
||||
assert.equal(logEvents[4].data[0], 'some debug');
|
||||
assert.equal(logEvents[2].data[0], "some debug");
|
||||
assert.equal(logEvents[3].data[0], "some debug");
|
||||
assert.equal(logEvents[4].data[0], "some debug");
|
||||
|
||||
// should handle log messages sent as part of end event'
|
||||
assert.equal(logEvents[5].data[0], "that's all folks");
|
||||
|
||||
// should handle unparseable log messages
|
||||
assert.equal(logEvents[6].level.toString(), 'ERROR');
|
||||
assert.equal(logEvents[6].categoryName, 'log4js');
|
||||
assert.equal(logEvents[6].data[0], 'Unable to parse log:');
|
||||
assert.equal(logEvents[6].data[1], 'bad message');
|
||||
assert.equal(logEvents[6].level.toString(), "ERROR");
|
||||
assert.equal(logEvents[6].categoryName, "log4js");
|
||||
assert.equal(logEvents[6].data[0], "Unable to parse log:");
|
||||
assert.equal(logEvents[6].data[1], "bad message");
|
||||
|
||||
assert.end();
|
||||
});
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('master without actual appender throws error', (t) => {
|
||||
batch.test("master without actual appender throws error", t => {
|
||||
const fakeNet = makeFakeNet();
|
||||
|
||||
const log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
net: fakeNet
|
||||
}
|
||||
const log4js = sandbox.require("../../lib/log4js", {
|
||||
requires: {
|
||||
net: fakeNet
|
||||
}
|
||||
);
|
||||
});
|
||||
t.throws(
|
||||
() => log4js.configure({
|
||||
appenders: { master: { type: 'multiprocess', mode: 'master' } },
|
||||
categories: { default: { appenders: ['master'], level: 'trace' } }
|
||||
}),
|
||||
() =>
|
||||
log4js.configure({
|
||||
appenders: { master: { type: "multiprocess", mode: "master" } },
|
||||
categories: { default: { appenders: ["master"], level: "trace" } }
|
||||
}),
|
||||
new Error('multiprocess master must have an "appender" defined')
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('master with unknown appender throws error', (t) => {
|
||||
batch.test("master with unknown appender throws error", t => {
|
||||
const fakeNet = makeFakeNet();
|
||||
|
||||
const log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
net: fakeNet
|
||||
}
|
||||
const log4js = sandbox.require("../../lib/log4js", {
|
||||
requires: {
|
||||
net: fakeNet
|
||||
}
|
||||
);
|
||||
});
|
||||
t.throws(
|
||||
() => log4js.configure({
|
||||
appenders: { master: { type: 'multiprocess', mode: 'master', appender: 'cheese' } },
|
||||
categories: { default: { appenders: ['master'], level: 'trace' } }
|
||||
}),
|
||||
() =>
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
master: { type: "multiprocess", mode: "master", appender: "cheese" }
|
||||
},
|
||||
categories: { default: { appenders: ["master"], level: "trace" } }
|
||||
}),
|
||||
new Error('multiprocess master appender "cheese" not defined')
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('master defaults', (t) => {
|
||||
batch.test("master defaults", t => {
|
||||
const fakeNet = makeFakeNet();
|
||||
|
||||
const log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
net: fakeNet
|
||||
}
|
||||
const log4js = sandbox.require("../../lib/log4js", {
|
||||
requires: {
|
||||
net: fakeNet
|
||||
}
|
||||
);
|
||||
});
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
stdout: { type: 'stdout' },
|
||||
master: { type: 'multiprocess', mode: 'master', appender: 'stdout' }
|
||||
stdout: { type: "stdout" },
|
||||
master: { type: "multiprocess", mode: "master", appender: "stdout" }
|
||||
},
|
||||
categories: { default: { appenders: ['master'], level: 'trace' } }
|
||||
categories: { default: { appenders: ["master"], level: "trace" } }
|
||||
});
|
||||
|
||||
t.test('should listen for log messages on localhost:5000', (assert) => {
|
||||
t.test("should listen for log messages on localhost:5000", assert => {
|
||||
assert.equal(fakeNet.port, 5000);
|
||||
assert.equal(fakeNet.host, 'localhost');
|
||||
assert.equal(fakeNet.host, "localhost");
|
||||
assert.end();
|
||||
});
|
||||
t.end();
|
||||

@ -1,262 +1,276 @@
'use strict';
const { test } = require("tap");
const log4js = require("../../lib/log4js");
const recording = require("../../lib/appenders/recording");

const test = require('tap').test;
const log4js = require('../../lib/log4js');
const recording = require('../../lib/appenders/recording');

test('../../lib/logger', (batch) => {
batch.beforeEach((done) => {
test("../../lib/logger", batch => {
batch.beforeEach(done => {
recording.reset();
done();
});
|
||||
|
||||
batch.test('creating a new log level', (t) => {
|
||||
batch.test("creating a new log level", t => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
DIAG: { value: 6000, colour: 'green' }
|
||||
DIAG: { value: 6000, colour: "green" }
|
||||
},
|
||||
appenders: {
|
||||
stdout: { type: 'stdout' }
|
||||
stdout: { type: "stdout" }
|
||||
},
|
||||
categories: {
|
||||
default: { appenders: ['stdout'], level: 'trace' }
|
||||
default: { appenders: ["stdout"], level: "trace" }
|
||||
}
|
||||
});
|
||||
|
||||
const logger = log4js.getLogger();
|
||||
|
||||
t.test('should export new log level in levels module', (assert) => {
|
||||
t.test("should export new log level in levels module", assert => {
|
||||
assert.ok(log4js.levels.DIAG);
|
||||
assert.equal(log4js.levels.DIAG.levelStr, 'DIAG');
|
||||
assert.equal(log4js.levels.DIAG.levelStr, "DIAG");
|
||||
assert.equal(log4js.levels.DIAG.level, 6000);
|
||||
assert.equal(log4js.levels.DIAG.colour, 'green');
|
||||
assert.equal(log4js.levels.DIAG.colour, "green");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.type(logger.diag, 'function', 'should create named function on logger prototype');
|
||||
t.type(logger.isDiagEnabled, 'function', 'should create isLevelEnabled function on logger prototype');
|
||||
t.type(logger.info, 'function', 'should retain default levels');
|
||||
t.type(
|
||||
logger.diag,
|
||||
"function",
|
||||
"should create named function on logger prototype"
|
||||
);
|
||||
t.type(
|
||||
logger.isDiagEnabled,
|
||||
"function",
|
||||
"should create isLevelEnabled function on logger prototype"
|
||||
);
|
||||
t.type(logger.info, "function", "should retain default levels");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('creating a new log level with underscores', (t) => {
|
||||
batch.test("creating a new log level with underscores", t => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
NEW_LEVEL_OTHER: { value: 6000, colour: 'blue' }
|
||||
NEW_LEVEL_OTHER: { value: 6000, colour: "blue" }
|
||||
},
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'trace' } }
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "trace" } }
|
||||
});
|
||||
const logger = log4js.getLogger();
|
||||
|
||||
t.test('should export new log level to levels module', (assert) => {
|
||||
t.test("should export new log level to levels module", assert => {
|
||||
assert.ok(log4js.levels.NEW_LEVEL_OTHER);
|
||||
assert.equal(log4js.levels.NEW_LEVEL_OTHER.levelStr, 'NEW_LEVEL_OTHER');
|
||||
assert.equal(log4js.levels.NEW_LEVEL_OTHER.levelStr, "NEW_LEVEL_OTHER");
|
||||
assert.equal(log4js.levels.NEW_LEVEL_OTHER.level, 6000);
|
||||
assert.equal(log4js.levels.NEW_LEVEL_OTHER.colour, 'blue');
|
||||
assert.equal(log4js.levels.NEW_LEVEL_OTHER.colour, "blue");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.type(
|
||||
logger.newLevelOther, 'function',
|
||||
'should create named function on logger prototype in camel case'
|
||||
logger.newLevelOther,
|
||||
"function",
|
||||
"should create named function on logger prototype in camel case"
|
||||
);
|
||||
t.type(
|
||||
logger.isNewLevelOtherEnabled, 'function',
|
||||
'should create named isLevelEnabled function on logger prototype in camel case'
|
||||
logger.isNewLevelOtherEnabled,
|
||||
"function",
|
||||
"should create named isLevelEnabled function on logger prototype in camel case"
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('creating log events containing newly created log level', (t) => {
|
||||
batch.test("creating log events containing newly created log level", t => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
LVL1: { value: 6000, colour: 'grey' },
|
||||
LVL2: { value: 5000, colour: 'magenta' }
|
||||
LVL1: { value: 6000, colour: "grey" },
|
||||
LVL2: { value: 5000, colour: "magenta" }
|
||||
},
|
||||
appenders: { recorder: { type: 'recording' } },
|
||||
appenders: { recorder: { type: "recording" } },
|
||||
categories: {
|
||||
default: { appenders: ['recorder'], level: 'LVL1' }
|
||||
default: { appenders: ["recorder"], level: "LVL1" }
|
||||
}
|
||||
});
|
||||
const logger = log4js.getLogger();
|
||||
|
||||
logger.log(log4js.levels.getLevel('LVL1', log4js.levels.DEBUG), 'Event 1');
|
||||
logger.log(log4js.levels.getLevel('LVL1'), 'Event 2');
|
||||
logger.log('LVL1', 'Event 3');
|
||||
logger.lvl1('Event 4');
|
||||
logger.log(log4js.levels.getLevel("LVL1", log4js.levels.DEBUG), "Event 1");
|
||||
logger.log(log4js.levels.getLevel("LVL1"), "Event 2");
|
||||
logger.log("LVL1", "Event 3");
|
||||
logger.lvl1("Event 4");
|
||||
|
||||
logger.lvl2('Event 5');
|
||||
logger.lvl2("Event 5");
|
||||
|
||||
const events = recording.replay();
|
||||
|
||||
t.test('should show log events with new log level', (assert) => {
|
||||
assert.equal(events[0].level.toString(), 'LVL1');
|
||||
assert.equal(events[0].data[0], 'Event 1');
|
||||
t.test("should show log events with new log level", assert => {
|
||||
assert.equal(events[0].level.toString(), "LVL1");
|
||||
assert.equal(events[0].data[0], "Event 1");
|
||||
|
||||
assert.equal(events[1].level.toString(), 'LVL1');
|
||||
assert.equal(events[1].data[0], 'Event 2');
|
||||
assert.equal(events[1].level.toString(), "LVL1");
|
||||
assert.equal(events[1].data[0], "Event 2");
|
||||
|
||||
assert.equal(events[2].level.toString(), 'LVL1');
|
||||
assert.equal(events[2].data[0], 'Event 3');
|
||||
assert.equal(events[2].level.toString(), "LVL1");
|
||||
assert.equal(events[2].data[0], "Event 3");
|
||||
|
||||
assert.equal(events[3].level.toString(), 'LVL1');
|
||||
assert.equal(events[3].data[0], 'Event 4');
|
||||
assert.equal(events[3].level.toString(), "LVL1");
|
||||
assert.equal(events[3].data[0], "Event 4");
|
||||
assert.end();
|
||||
});
|
||||
|
||||
t.equal(events.length, 4, 'should not be present if min log level is greater than newly created level');
|
||||
t.equal(
|
||||
events.length,
|
||||
4,
|
||||
"should not be present if min log level is greater than newly created level"
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('creating a new log level with incorrect parameters', (t) => {
|
||||
batch.test("creating a new log level with incorrect parameters", t => {
|
||||
t.throws(() => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
cheese: { value: 'biscuits' }
|
||||
cheese: { value: "biscuits" }
|
||||
},
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'trace' } }
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "trace" } }
|
||||
});
|
||||
},
|
||||
'level "cheese".value must have an integer value');
|
||||
}, 'level "cheese".value must have an integer value');
|
||||
|
||||
t.throws(() => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
cheese: 'biscuits'
|
||||
cheese: "biscuits"
|
||||
},
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'trace' } }
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "trace" } }
|
||||
});
|
||||
},
|
||||
'level "cheese" must be an object');
|
||||
}, 'level "cheese" must be an object');
|
||||
|
||||
t.throws(() => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
cheese: { thing: 'biscuits' }
|
||||
cheese: { thing: "biscuits" }
|
||||
},
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'trace' } }
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "trace" } }
|
||||
});
|
||||
},
|
||||
"level \"cheese\" must have a 'value' property");
|
||||
}, "level \"cheese\" must have a 'value' property");
|
||||
|
||||
t.throws(() => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
cheese: { value: 3 }
|
||||
},
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'trace' } }
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "trace" } }
|
||||
});
|
||||
},
|
||||
"level \"cheese\" must have a 'colour' property");
|
||||
}, "level \"cheese\" must have a 'colour' property");
|
||||
|
||||
t.throws(() => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
cheese: { value: 3, colour: 'pants' }
|
||||
cheese: { value: 3, colour: "pants" }
|
||||
},
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'trace' } }
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "trace" } }
|
||||
});
|
||||
},
|
||||
'level "cheese".colour must be one of white, grey, black, blue, cyan, green, magenta, red, yellow');
|
||||
}, 'level "cheese".colour must be one of white, grey, black, blue, cyan, green, magenta, red, yellow');
|
||||
|
||||
t.throws(() => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
'#pants': 3
|
||||
"#pants": 3
|
||||
},
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'trace' } }
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "trace" } }
|
||||
});
|
||||
},
|
||||
'level name "#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
|
||||
}, 'level name "#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
|
||||
|
||||
t.throws(() => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
'thing#pants': 3
|
||||
"thing#pants": 3
|
||||
},
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'trace' } }
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "trace" } }
|
||||
});
|
||||
},
|
||||
'level name "thing#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
|
||||
}, 'level name "thing#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
|
||||
|
||||
t.throws(() => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
'1pants': 3
|
||||
"1pants": 3
|
||||
},
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'trace' } }
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "trace" } }
|
||||
});
|
||||
},
|
||||
'level name "1pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
|
||||
}, 'level name "1pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
|
||||
|
||||
t.throws(() => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
2: 3
|
||||
},
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'trace' } }
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "trace" } }
|
||||
});
|
||||
},
|
||||
'level name "2" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
|
||||
}, 'level name "2" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
|
||||
|
||||
t.throws(() => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
'cheese!': 3
|
||||
"cheese!": 3
|
||||
},
|
||||
appenders: { stdout: { type: 'stdout' } },
|
||||
categories: { default: { appenders: ['stdout'], level: 'trace' } }
|
||||
appenders: { stdout: { type: "stdout" } },
|
||||
categories: { default: { appenders: ["stdout"], level: "trace" } }
|
||||
});
|
||||
},
|
||||
'level name "cheese!" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
|
||||
}, 'level name "cheese!" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)');
|
||||
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('calling log with an undefined log level', (t) => {
|
||||
batch.test("calling log with an undefined log level", t => {
|
||||
log4js.configure({
|
||||
appenders: { recorder: { type: 'recording' } },
|
||||
categories: { default: { appenders: ['recorder'], level: 'trace' } }
|
||||
appenders: { recorder: { type: "recording" } },
|
||||
categories: { default: { appenders: ["recorder"], level: "trace" } }
|
||||
});
|
||||
|
||||
const logger = log4js.getLogger();
|
||||
|
||||
logger.log('LEVEL_DOES_NEXT_EXIST', 'Event 1');
|
||||
logger.log(log4js.levels.getLevel('LEVEL_DOES_NEXT_EXIST'), 'Event 2');
|
||||
logger.log("LEVEL_DOES_NEXT_EXIST", "Event 1");
|
||||
logger.log(log4js.levels.getLevel("LEVEL_DOES_NEXT_EXIST"), "Event 2");
|
||||
|
||||
const events = recording.replay();
|
||||
t.equal(events[0].level.toString(), 'INFO', 'should fall back to INFO');
|
||||
t.equal(events[1].level.toString(), 'INFO', 'should fall back to INFO');
|
||||
t.equal(events[0].level.toString(), "INFO", "should fall back to INFO");
|
||||
t.equal(events[1].level.toString(), "INFO", "should fall back to INFO");
|
||||
t.end();
|
||||
});
|
||||
|
||||
batch.test('creating a new level with an existing level name', (t) => {
|
||||
batch.test("creating a new level with an existing level name", t => {
|
||||
log4js.configure({
|
||||
levels: {
|
||||
info: { value: 1234, colour: 'blue' }
|
||||
info: { value: 1234, colour: "blue" }
|
||||
},
|
||||
appenders: { recorder: { type: 'recording' } },
|
||||
categories: { default: { appenders: ['recorder'], level: 'all' } }
|
||||
appenders: { recorder: { type: "recording" } },
|
||||
categories: { default: { appenders: ["recorder"], level: "all" } }
|
||||
});
|
||||
|
||||
t.equal(log4js.levels.INFO.level, 1234, 'should override the existing log level');
|
||||
t.equal(log4js.levels.INFO.colour, 'blue', 'should override the existing log level');
|
||||
t.equal(
|
||||
log4js.levels.INFO.level,
|
||||
1234,
|
||||
"should override the existing log level"
|
||||
);
|
||||
t.equal(
|
||||
log4js.levels.INFO.colour,
|
||||
"blue",
|
||||
"should override the existing log level"
|
||||
);
|
||||
|
||||
const logger = log4js.getLogger();
|
||||
logger.info('test message');
|
||||
logger.info("test message");
|
||||
|
||||
const events = recording.replay();
|
||||
t.equal(events[0].level.level, 1234, 'should override the existing log level');
|
||||
t.equal(
|
||||
events[0].level.level,
|
||||
1234,
|
||||
"should override the existing log level"
|
||||
);
|
||||
t.end();
|
||||
});
|
||||
batch.end();
|
||||

@ -1,146 +1,169 @@
'use strict';

const test = require('tap').test;
const log4js = require('../../lib/log4js');
const recording = require('../../lib/appenders/recording');
const { test } = require("tap");
const log4js = require("../../lib/log4js");
const recording = require("../../lib/appenders/recording");

/**
* test a simple regexp
*/
test('log4js noLogFilter', (batch) => {
|
||||
batch.beforeEach((done) => { recording.reset(); done(); });
|
||||
|
||||
batch.test('appender should exclude events that match the regexp string', (t) => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
recorder: { type: 'recording' },
|
||||
filtered: {
|
||||
type: 'noLogFilter',
|
||||
exclude: 'This.*not',
|
||||
appender: 'recorder'
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } }
|
||||
});
|
||||
|
||||
const logger = log4js.getLogger();
|
||||
logger.debug('This should not get logged');
|
||||
logger.debug('This should get logged');
|
||||
logger.debug('Another case that not match the regex, so it should get logged');
|
||||
const logEvents = recording.replay();
|
||||
t.equal(logEvents.length, 2);
|
||||
t.equal(logEvents[0].data[0], 'This should get logged');
|
||||
t.equal(logEvents[1].data[0], 'Another case that not match the regex, so it should get logged');
|
||||
t.end();
|
||||
test("log4js noLogFilter", batch => {
|
||||
batch.beforeEach(done => {
|
||||
recording.reset();
|
||||
done();
|
||||
});
|
||||
|
||||
batch.test(
|
||||
"appender should exclude events that match the regexp string",
|
||||
t => {
|
||||
log4js.configure({
|
||||
appenders: {
|
||||
recorder: { type: "recording" },
|
||||
filtered: {
|
||||
type: "noLogFilter",
|
||||
exclude: "This.*not",
|
||||
appender: "recorder"
|
||||
}
|
||||
},
|
||||
categories: { default: { appenders: ["filtered"], level: "DEBUG" } }
|
||||
});
|
||||
|
||||
const logger = log4js.getLogger();
|
||||
logger.debug("This should not get logged");
|
||||
logger.debug("This should get logged");
|
||||
logger.debug(
|
||||
"Another case that not match the regex, so it should get logged"
|
||||
);
|
||||
const logEvents = recording.replay();
|
||||
t.equal(logEvents.length, 2);
|
||||
t.equal(logEvents[0].data[0], "This should get logged");
|
||||
t.equal(
|
||||
logEvents[1].data[0],
|
||||
"Another case that not match the regex, so it should get logged"
|
||||
);
|
||||
t.end();
|
||||
}
|
||||
);
|
||||
|
||||
/**
* test an array of regexp
*/
batch.test('appender should exclude events that match the regexp string contained in the array', (t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: ['This.*not', 'instead'],
appender: 'recorder'
}
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } }
});
batch.test(
"appender should exclude events that match the regexp string contained in the array",
t => {
log4js.configure({
appenders: {
recorder: { type: "recording" },
filtered: {
type: "noLogFilter",
exclude: ["This.*not", "instead"],
appender: "recorder"
}
},
categories: { default: { appenders: ["filtered"], level: "DEBUG" } }
});

const logger = log4js.getLogger();
logger.debug('This should not get logged');
logger.debug('This should get logged');
logger.debug('Another case that not match the regex, so it should get logged');
logger.debug('This case instead it should get logged');
logger.debug('The last that should get logged');
const logEvents = recording.replay();
t.equal(logEvents.length, 3);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(logEvents[1].data[0], 'Another case that not match the regex, so it should get logged');
t.equal(logEvents[2].data[0], 'The last that should get logged');
t.end();
});
const logger = log4js.getLogger();
logger.debug("This should not get logged");
logger.debug("This should get logged");
logger.debug(
"Another case that not match the regex, so it should get logged"
);
logger.debug("This case instead it should get logged");
logger.debug("The last that should get logged");
const logEvents = recording.replay();
t.equal(logEvents.length, 3);
t.equal(logEvents[0].data[0], "This should get logged");
t.equal(
logEvents[1].data[0],
"Another case that not match the regex, so it should get logged"
);
t.equal(logEvents[2].data[0], "The last that should get logged");
t.end();
}
);
/**
* test case insentitive regexp
*/
batch.test('appender should evaluate the regexp using incase sentitive option', (t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: ['NOT', 'eX.*de'],
appender: 'recorder'
}
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } }
});
batch.test(
"appender should evaluate the regexp using incase sentitive option",
t => {
log4js.configure({
appenders: {
recorder: { type: "recording" },
filtered: {
type: "noLogFilter",
exclude: ["NOT", "eX.*de"],
appender: "recorder"
}
},
categories: { default: { appenders: ["filtered"], level: "DEBUG" } }
});

const logger = log4js.getLogger();
const logger = log4js.getLogger();

logger.debug('This should not get logged');
logger.debug('This should get logged');
logger.debug('Exclude this string');
logger.debug('Include this string');
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(logEvents[1].data[0], 'Include this string');
t.end();
});
logger.debug("This should not get logged");
logger.debug("This should get logged");
logger.debug("Exclude this string");
logger.debug("Include this string");
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], "This should get logged");
t.equal(logEvents[1].data[0], "Include this string");
t.end();
}
);

/**
* test empty string or null regexp
*/
batch.test('appender should skip the match in case of empty or null regexp', (t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: ['', null, undefined],
appender: 'recorder'
}
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } }
});
batch.test(
"appender should skip the match in case of empty or null regexp",
t => {
log4js.configure({
appenders: {
recorder: { type: "recording" },
filtered: {
type: "noLogFilter",
exclude: ["", null, undefined],
appender: "recorder"
}
},
categories: { default: { appenders: ["filtered"], level: "DEBUG" } }
});

const logger = log4js.getLogger();
logger.debug('This should get logged');
logger.debug('Another string that should get logged');
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(logEvents[1].data[0], 'Another string that should get logged');
t.end();
});
const logger = log4js.getLogger();
logger.debug("This should get logged");
logger.debug("Another string that should get logged");
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], "This should get logged");
t.equal(logEvents[1].data[0], "Another string that should get logged");
t.end();
}
);

/**
* test for excluding all the events that contains digits
*/
batch.test('appender should exclude the events that contains digits', (t) => {
batch.test("appender should exclude the events that contains digits", t => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
recorder: { type: "recording" },
filtered: {
type: 'noLogFilter',
exclude: '\\d',
appender: 'recorder'
type: "noLogFilter",
exclude: "\\d",
appender: "recorder"
}
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } }
categories: { default: { appenders: ["filtered"], level: "DEBUG" } }
});

const logger = log4js.getLogger();
logger.debug('This should get logged');
logger.debug('The 2nd event should not get logged');
logger.debug('The 3rd event should not get logged, such as the 2nd');
logger.debug("This should get logged");
logger.debug("The 2nd event should not get logged");
logger.debug("The 3rd event should not get logged, such as the 2nd");
const logEvents = recording.replay();
t.equal(logEvents.length, 1);
t.equal(logEvents[0].data[0], 'This should get logged');
t.equal(logEvents[0].data[0], "This should get logged");
t.end();
});

@ -148,30 +171,33 @@ test('log4js noLogFilter', (batch) => {
* test the cases provided in the documentation
* https://log4js-node.github.io/log4js-node/noLogFilter.html
*/
batch.test('appender should exclude not valid events according to the documentation', (t) => {
log4js.configure({
appenders: {
recorder: { type: 'recording' },
filtered: {
type: 'noLogFilter',
exclude: ['NOT', '\\d', ''],
appender: 'recorder'
}
},
categories: { default: { appenders: ['filtered'], level: 'DEBUG' } }
});
batch.test(
"appender should exclude not valid events according to the documentation",
t => {
log4js.configure({
appenders: {
recorder: { type: "recording" },
filtered: {
type: "noLogFilter",
exclude: ["NOT", "\\d", ""],
appender: "recorder"
}
},
categories: { default: { appenders: ["filtered"], level: "DEBUG" } }
});

const logger = log4js.getLogger();
logger.debug('I will be logged in all-the-logs.log');
logger.debug('I will be not logged in all-the-logs.log');
logger.debug('A 2nd message that will be excluded in all-the-logs.log');
logger.debug('Hello again');
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], 'I will be logged in all-the-logs.log');
t.equal(logEvents[1].data[0], 'Hello again');
t.end();
});
const logger = log4js.getLogger();
logger.debug("I will be logged in all-the-logs.log");
logger.debug("I will be not logged in all-the-logs.log");
logger.debug("A 2nd message that will be excluded in all-the-logs.log");
logger.debug("Hello again");
const logEvents = recording.replay();
t.equal(logEvents.length, 2);
t.equal(logEvents[0].data[0], "I will be logged in all-the-logs.log");
t.equal(logEvents[1].data[0], "Hello again");
t.end();
}
);

batch.end();
});

@ -1,48 +1,51 @@
const test = require('tap').test;
const sandbox = require('@log4js-node/sandboxed-module');
const { test } = require("tap");
const sandbox = require("@log4js-node/sandboxed-module");

// passenger provides a non-functional cluster module,
// but it does not implement the event emitter functions
const passengerCluster = {
disconnect: function () { return false; },
fork: function () { return false; },
setupMaster: function () { return false; },
disconnect() {
return false;
},
fork() {
return false;
},
setupMaster() {
return false;
},
isWorker: true,
isMaster: false,
schedulingPolicy: false,
settings: false,
worker: false,
workers: false,
workers: false
};

const vcr = require('../../lib/appenders/recording');
const vcr = require("../../lib/appenders/recording");

const log4js = sandbox.require(
'../../lib/log4js',
{
requires: {
cluster: passengerCluster,
'./appenders/recording': vcr
}
const log4js = sandbox.require("../../lib/log4js", {
requires: {
cluster: passengerCluster,
"./appenders/recording": vcr
}
);
});

test('When running in Passenger', (batch) => {
batch.test('it should still log', (t) => {
test("When running in Passenger", batch => {
batch.test("it should still log", t => {
log4js.configure({
appenders: {
vcr: { type: 'recording' }
vcr: { type: "recording" }
},
categories: {
default: { appenders: ['vcr'], level: 'info' }
default: { appenders: ["vcr"], level: "info" }
},
disableClustering: true
});
log4js.getLogger().info('This should still work');
log4js.getLogger().info("This should still work");

const events = vcr.replay();
t.equal(events.length, 1);
t.equal(events[0].data[0], 'This should still work');
t.equal(events[0].data[0], "This should still work");
t.end();
});


@ -1,8 +1,6 @@
'use strict';

const test = require('tap').test;
const cluster = require('cluster');
const debug = require('debug')('log4js:pm2-test');
const { test } = require("tap");
const cluster = require("cluster");
const debug = require("debug")("log4js:pm2-test");

// PM2 runs everything as workers
// - no master in the cluster (PM2 acts as master itself)
@ -11,7 +9,7 @@ if (cluster.isMaster) {
// create two worker forks
// PASS IN NODE_APP_INSTANCE HERE
const appEvents = {};
['0', '1'].forEach((i) => {
["0", "1"].forEach(i => {
cluster.fork({ NODE_APP_INSTANCE: i });
});

@ -19,13 +17,15 @@ if (cluster.isMaster) {
if (worker.type || worker.topic) {
msg = worker;
}
if (msg.type === 'testing') {
debug(`Received testing message from ${msg.instance} with events ${msg.events}`);
if (msg.type === "testing") {
debug(
`Received testing message from ${msg.instance} with events ${msg.events}`
);
appEvents[msg.instance] = msg.events;
}

// we have to do the re-broadcasting that the pm2-intercom module would do.
if (msg.topic === 'log4js:message') {
if (msg.topic === "log4js:message") {
debug(`Received log message ${msg}`);
for (const id in cluster.workers) {
cluster.workers[id].send(msg);
@ -33,70 +33,90 @@
}
};

cluster.on('message', messageHandler);
cluster.on("message", messageHandler);

let count = 0;
cluster.on('exit', () => {
cluster.on("exit", () => {
count += 1;
if (count === 2) {
// wait for any IPC messages still to come, because it seems they are slooooow.
setTimeout(() => {
test('PM2 Support', (batch) => {
batch.test('should not get any events when turned off', (t) => {
t.notOk(appEvents['0'].filter(e => e && e.data[0].indexOf('will not be logged') > -1).length);
t.notOk(appEvents['1'].filter(e => e && e.data[0].indexOf('will not be logged') > -1).length);
test("PM2 Support", batch => {
batch.test("should not get any events when turned off", t => {
t.notOk(
appEvents["0"].filter(
e => e && e.data[0].indexOf("will not be logged") > -1
).length
);
t.notOk(
appEvents["1"].filter(
e => e && e.data[0].indexOf("will not be logged") > -1
).length
);
t.end();
});

batch.test('should get events on app instance 0', (t) => {
t.equal(appEvents['0'].length, 2);
t.equal(appEvents['0'][0].data[0], 'this should now get logged');
t.equal(appEvents['0'][1].data[0], 'this should now get logged');
batch.test("should get events on app instance 0", t => {
t.equal(appEvents["0"].length, 2);
t.equal(appEvents["0"][0].data[0], "this should now get logged");
t.equal(appEvents["0"][1].data[0], "this should now get logged");
t.end();
});

batch.test('should not get events on app instance 1', (t) => {
t.equal(appEvents['1'].length, 0);
batch.test("should not get events on app instance 1", t => {
t.equal(appEvents["1"].length, 0);
t.end();
});
batch.end();
cluster.removeListener('message', messageHandler);
cluster.removeListener("message", messageHandler);
});
}, 1000);
}
});
} else {
const recorder = require('../../lib/appenders/recording');
const log4js = require('../../lib/log4js');
const recorder = require("../../lib/appenders/recording");
const log4js = require("../../lib/log4js");
log4js.configure({
appenders: { out: { type: 'recording' } },
categories: { default: { appenders: ['out'], level: 'info' } }
appenders: { out: { type: "recording" } },
categories: { default: { appenders: ["out"], level: "info" } }
});

const logger = log4js.getLogger('test');
logger.info('this is a test, but without enabling PM2 support it will not be logged');
const logger = log4js.getLogger("test");
logger.info(
"this is a test, but without enabling PM2 support it will not be logged"
);

// IPC messages can take a while to get through to start with.
setTimeout(() => {
log4js.shutdown(() => {
log4js.configure({
appenders: { out: { type: 'recording' } },
categories: { default: { appenders: ['out'], level: 'info' } },
appenders: { out: { type: "recording" } },
categories: { default: { appenders: ["out"], level: "info" } },
pm2: true
});
const anotherLogger = log4js.getLogger('test');
const anotherLogger = log4js.getLogger("test");
setTimeout(() => {
anotherLogger.info('this should now get logged');
anotherLogger.info("this should now get logged");
}, 1000);

// if we're the pm2-master we should wait for the other process to send its log messages
setTimeout(() => {
log4js.shutdown(() => {
const events = recorder.replay();
debug(`Sending test events ${events} from ${process.env.NODE_APP_INSTANCE}`);
debug(
`Sending test events ${events} from ${process.env.NODE_APP_INSTANCE}`
);
process.send(
{ type: 'testing', instance: process.env.NODE_APP_INSTANCE, events: events },
() => { setTimeout(() => { cluster.worker.disconnect(); }, 1000); }
{
type: "testing",
instance: process.env.NODE_APP_INSTANCE,
events
},
() => {
setTimeout(() => {
cluster.worker.disconnect();
}, 1000);
}
);
});
}, 3000);

@ -1,70 +1,90 @@
const test = require('tap').test;
const net = require('net');
const log4js = require('../../lib/log4js');
const vcr = require('../../lib/appenders/recording');
const levels = require('../../lib/levels');
const LoggingEvent = require('../../lib/LoggingEvent');
const { test } = require("tap");
const net = require("net");
const log4js = require("../../lib/log4js");
const vcr = require("../../lib/appenders/recording");
const levels = require("../../lib/levels");
const LoggingEvent = require("../../lib/LoggingEvent");

log4js.configure({
appenders: {
vcr: { type: 'recording' },
tcp: { type: 'tcp-server', port: 5678 }
vcr: { type: "recording" },
tcp: { type: "tcp-server", port: 5678 }
},
categories: {
default: { appenders: ['vcr'], level: 'debug' }
default: { appenders: ["vcr"], level: "debug" }
}
});

// give the socket a chance to start up
test('TCP Server', (batch) => {
batch.test('should listen for TCP messages and re-send via process.send', (t) => {
setTimeout(() => {
const socket = net.connect(5678, () => {
socket.write(
`${(new LoggingEvent('test-category', levels.INFO, ['something'], {})).serialise()
}__LOG4JS__${
(new LoggingEvent('test-category', levels.INFO, ['something else'], {})).serialise()
}__LOG4JS__some nonsense__LOG4JS__{"some":"json"}__LOG4JS__`,
() => {
socket.end();
setTimeout(() => {
log4js.shutdown(() => {
const logs = vcr.replay();
t.equal(logs.length, 4);
t.match(logs[0], {
data: ['something'],
categoryName: 'test-category',
level: { levelStr: 'INFO' },
context: {}
test("TCP Server", batch => {
batch.test(
"should listen for TCP messages and re-send via process.send",
t => {
setTimeout(() => {
const socket = net.connect(5678, () => {
socket.write(
`${new LoggingEvent(
"test-category",
levels.INFO,
["something"],
{}
).serialise()}__LOG4JS__${new LoggingEvent(
"test-category",
levels.INFO,
["something else"],
{}
).serialise()}__LOG4JS__some nonsense__LOG4JS__{"some":"json"}__LOG4JS__`,
() => {
socket.end();
setTimeout(() => {
log4js.shutdown(() => {
const logs = vcr.replay();
t.equal(logs.length, 4);
t.match(logs[0], {
data: ["something"],
categoryName: "test-category",
level: { levelStr: "INFO" },
context: {}
});
t.match(logs[1], {
data: ["something else"],
categoryName: "test-category",
level: { levelStr: "INFO" },
context: {}
});
t.match(logs[2], {
data: [
"Unable to parse log:",
"some nonsense",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.match(logs[3], {
data: [
"Unable to parse log:",
'{"some":"json"}',
"because: ",
TypeError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.end();
});
t.match(logs[1], {
data: ['something else'],
categoryName: 'test-category',
level: { levelStr: 'INFO' },
context: {}
});
t.match(logs[2], {
data: ['Unable to parse log:', 'some nonsense', 'because: ', SyntaxError],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {}
});
t.match(logs[3], {
data: ['Unable to parse log:', '{"some":"json"}', 'because: ', TypeError],
categoryName: 'log4js',
level: { levelStr: 'ERROR' },
context: {}
});
t.end();
});
}, 100);
}
);
});
}, 100);
}
);
});

socket.unref();
}, 100);
socket.unref();
}, 100);

batch.end();
});
batch.end();
}
);
});

@ -1,6 +1,3 @@
'use strict';

/* jshint loopfunc: true */
// This test shows an asymmetry between setLevel and isLevelEnabled
// (in log4js-node@0.4.3 and earlier):
// 1) setLevel("foo") works, but setLevel(log4js.levels.foo) silently
@ -8,23 +5,23 @@
// 2) isLevelEnabled("foo") works as does isLevelEnabled(log4js.levels.foo).
//

const test = require('tap').test;
const log4js = require('../../lib/log4js');
const { test } = require("tap");
const log4js = require("../../lib/log4js");

const logger = log4js.getLogger('test-setLevel-asymmetry');
const logger = log4js.getLogger("test-setLevel-asymmetry");

// Define the array of levels as string to iterate over.
const strLevels = ['Trace', 'Debug', 'Info', 'Warn', 'Error', 'Fatal'];
const strLevels = ["Trace", "Debug", "Info", "Warn", "Error", "Fatal"];
const log4jsLevels = strLevels.map(log4js.levels.getLevel);

test('log4js setLevel', (batch) => {
strLevels.forEach((strLevel) => {
batch.test(`is called with a ${strLevel} as string`, (t) => {
test("log4js setLevel", batch => {
strLevels.forEach(strLevel => {
batch.test(`is called with a ${strLevel} as string`, t => {
const log4jsLevel = log4js.levels.getLevel(strLevel);

t.test('should convert string to level correctly', (assert) => {
t.test("should convert string to level correctly", assert => {
logger.level = strLevel;
log4jsLevels.forEach((level) => {
log4jsLevels.forEach(level => {
assert.equal(
logger.isLevelEnabled(level),
log4jsLevel.isLessThanOrEqualTo(level)
@ -33,9 +30,9 @@ test('log4js setLevel', (batch) => {
assert.end();
});

t.test('should also accept a Level', (assert) => {
t.test("should also accept a Level", assert => {
logger.level = log4jsLevel;
log4jsLevels.forEach((level) => {
log4jsLevels.forEach(level => {
assert.equal(
logger.isLevelEnabled(level),
log4jsLevel.isLessThanOrEqualTo(level)

@ -1,23 +1,21 @@
'use strict';
const { test } = require("tap");

const test = require('tap').test;

test('Stacktraces from errors in different VM context', (t) => {
const log4js = require('../../lib/log4js');
const recorder = require('../../lib/appenders/recording');
const layout = require('../../lib/layouts').basicLayout;
const vm = require('vm');
test("Stacktraces from errors in different VM context", t => {
const log4js = require("../../lib/log4js");
const recorder = require("../../lib/appenders/recording");
const layout = require("../../lib/layouts").basicLayout;
const vm = require("vm");

log4js.configure({
appenders: { vcr: { type: 'recording' } },
categories: { default: { appenders: ['vcr'], level: 'debug' } }
appenders: { vcr: { type: "recording" } },
categories: { default: { appenders: ["vcr"], level: "debug" } }
});

const logger = log4js.getLogger();

try {
// Access not defined variable.
vm.runInNewContext('myVar();', {}, 'myfile.js');
vm.runInNewContext("myVar();", {}, "myfile.js");
} catch (e) {
// Expect to have a stack trace printed.
logger.error(e);
@ -26,6 +24,6 @@ test('Stacktraces from errors in different VM context', (t) => {
const events = recorder.replay();
// recording appender events do not go through layouts, so let's do it
const output = layout(events[0]);
t.match(output, 'stacktraces-test.js');
t.match(output, "stacktraces-test.js");
t.end();
});

@ -1,57 +1,59 @@
'use strict';
const { test } = require("tap");
const sandbox = require("@log4js-node/sandboxed-module");
const layouts = require("../../lib/layouts");

const test = require('tap').test;
const sandbox = require('@log4js-node/sandboxed-module');
const layouts = require('../../lib/layouts');

test('stderr appender', (t) => {
test("stderr appender", t => {
const output = [];

const appender = sandbox.require(
'../../lib/appenders/stderr',
{
const appender = sandbox
.require("../../lib/appenders/stderr", {
globals: {
process: {
stderr: {
write: function (data) {
write(data) {
output.push(data);
}
}
}
}
}
).configure({ type: 'stderr', layout: { type: 'messagePassThrough' } }, layouts);
})
.configure(
{ type: "stderr", layout: { type: "messagePassThrough" } },
layouts
);

appender({ data: ['biscuits'] });
appender({ data: ["biscuits"] });
t.plan(2);
t.equal(output.length, 1, 'There should be one message.');
t.equal(output[0], 'biscuits\n', 'The message should be biscuits.');
t.equal(output.length, 1, "There should be one message.");
t.equal(output[0], "biscuits\n", "The message should be biscuits.");
t.end();
});

test('stderr appender with default layout', (t) => {
test("stderr appender with default layout", t => {
const output = [];
layouts.colouredLayout = () => 'I used the colouredLayout';
layouts.colouredLayout = () => "I used the colouredLayout";

const appender = sandbox.require(
'../../lib/appenders/stderr',
{
const appender = sandbox
.require("../../lib/appenders/stderr", {
globals: {
process: {
stderr: {
write: function (data) {
write(data) {
output.push(data);
}
}
}
}
}
).configure({ type: 'stderr' }, layouts);
})
.configure({ type: "stderr" }, layouts);


appender({ data: ['biscuits'] });
appender({ data: ["biscuits"] });
t.plan(2);
t.equal(output.length, 1, 'There should be one message.');
t.equal(output[0], 'I used the colouredLayout\n', 'The message should have gone through the default layout.');
t.equal(output.length, 1, "There should be one message.");
t.equal(
output[0],
"I used the colouredLayout\n",
"The message should have gone through the default layout."
);
t.end();
});

@ -1,30 +1,30 @@
'use strict';
const { test } = require("tap");
const sandbox = require("@log4js-node/sandboxed-module");
const layouts = require("../../lib/layouts");

const test = require('tap').test;
const sandbox = require('@log4js-node/sandboxed-module');
const layouts = require('../../lib/layouts');

test('stdout appender', (t) => {
test("stdout appender", t => {
const output = [];

const appender = sandbox.require(
'../../lib/appenders/stdout',
{
const appender = sandbox
.require("../../lib/appenders/stdout", {
globals: {
process: {
stdout: {
write: function (data) {
write(data) {
output.push(data);
}
}
}
}
}
).configure({ type: 'stdout', layout: { type: 'messagePassThrough' } }, layouts);
})
.configure(
{ type: "stdout", layout: { type: "messagePassThrough" } },
layouts
);

appender({ data: ['cheese'] });
appender({ data: ["cheese"] });
t.plan(2);
t.equal(output.length, 1, 'There should be one message.');
t.equal(output[0], 'cheese\n', 'The message should be cheese.');
t.equal(output.length, 1, "There should be one message.");
t.equal(output[0], "cheese\n", "The message should be cheese.");
t.end();
});

@ -1,34 +1,32 @@
'use strict';
const { test } = require("tap");
const log4js = require("../../lib/log4js");

const test = require('tap').test;
const log4js = require('../../lib/log4js');

test('subcategories', (batch) => {
batch.test('loggers created after levels configuration is loaded', (t) => {
test("subcategories", batch => {
batch.test("loggers created after levels configuration is loaded", t => {
log4js.configure({
appenders: { stdout: { type: 'stdout' } },
appenders: { stdout: { type: "stdout" } },
categories: {
default: { appenders: ['stdout'], level: 'TRACE' },
sub1: { appenders: ['stdout'], level: 'WARN' },
'sub1.sub11': { appenders: ['stdout'], level: 'TRACE' },
'sub1.sub11.sub111': { appenders: ['stdout'], level: 'WARN' },
'sub1.sub12': { appenders: ['stdout'], level: 'INFO' }
default: { appenders: ["stdout"], level: "TRACE" },
sub1: { appenders: ["stdout"], level: "WARN" },
"sub1.sub11": { appenders: ["stdout"], level: "TRACE" },
"sub1.sub11.sub111": { appenders: ["stdout"], level: "WARN" },
"sub1.sub12": { appenders: ["stdout"], level: "INFO" }
}
});

const loggers = {
sub1: log4js.getLogger('sub1'), // WARN
sub11: log4js.getLogger('sub1.sub11'), // TRACE
sub111: log4js.getLogger('sub1.sub11.sub111'), // WARN
sub12: log4js.getLogger('sub1.sub12'), // INFO
sub1: log4js.getLogger("sub1"), // WARN
sub11: log4js.getLogger("sub1.sub11"), // TRACE
sub111: log4js.getLogger("sub1.sub11.sub111"), // WARN
sub12: log4js.getLogger("sub1.sub12"), // INFO

sub13: log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
sub112: log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
sub121: log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
sub0: log4js.getLogger('sub0') // Not defined, not inherited: TRACE
sub13: log4js.getLogger("sub1.sub13"), // Inherits sub1: WARN
sub112: log4js.getLogger("sub1.sub11.sub112"), // Inherits sub1.sub11: TRACE
sub121: log4js.getLogger("sub1.sub12.sub121"), // Inherits sub12: INFO
sub0: log4js.getLogger("sub0") // Not defined, not inherited: TRACE
};

t.test('check logger levels', (assert) => {
t.test("check logger levels", assert => {
assert.equal(loggers.sub1.level, log4js.levels.WARN);
assert.equal(loggers.sub11.level, log4js.levels.TRACE);
assert.equal(loggers.sub111.level, log4js.levels.WARN);
@ -44,38 +42,38 @@ test('subcategories', (batch) => {
t.end();
});

batch.test('loggers created before levels configuration is loaded', (t) => {
batch.test("loggers created before levels configuration is loaded", t => {
// reset to defaults
log4js.configure({
appenders: { stdout: { type: 'stdout' } },
categories: { default: { appenders: ['stdout'], level: 'info' } }
appenders: { stdout: { type: "stdout" } },
categories: { default: { appenders: ["stdout"], level: "info" } }
});

// these should all get the default log level of INFO
const loggers = {
sub1: log4js.getLogger('sub1'), // WARN
sub11: log4js.getLogger('sub1.sub11'), // TRACE
sub111: log4js.getLogger('sub1.sub11.sub111'), // WARN
sub12: log4js.getLogger('sub1.sub12'), // INFO
sub1: log4js.getLogger("sub1"), // WARN
sub11: log4js.getLogger("sub1.sub11"), // TRACE
sub111: log4js.getLogger("sub1.sub11.sub111"), // WARN
sub12: log4js.getLogger("sub1.sub12"), // INFO

sub13: log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
sub112: log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
sub121: log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
sub0: log4js.getLogger('sub0') // Not defined, not inherited: TRACE
sub13: log4js.getLogger("sub1.sub13"), // Inherits sub1: WARN
sub112: log4js.getLogger("sub1.sub11.sub112"), // Inherits sub1.sub11: TRACE
sub121: log4js.getLogger("sub1.sub12.sub121"), // Inherits sub12: INFO
sub0: log4js.getLogger("sub0") // Not defined, not inherited: TRACE
};

log4js.configure({
appenders: { stdout: { type: 'stdout' } },
appenders: { stdout: { type: "stdout" } },
categories: {
default: { appenders: ['stdout'], level: 'TRACE' },
sub1: { appenders: ['stdout'], level: 'WARN' },
'sub1.sub11': { appenders: ['stdout'], level: 'TRACE' },
'sub1.sub11.sub111': { appenders: ['stdout'], level: 'WARN' },
'sub1.sub12': { appenders: ['stdout'], level: 'INFO' }
default: { appenders: ["stdout"], level: "TRACE" },
sub1: { appenders: ["stdout"], level: "WARN" },
"sub1.sub11": { appenders: ["stdout"], level: "TRACE" },
"sub1.sub11.sub111": { appenders: ["stdout"], level: "WARN" },
"sub1.sub12": { appenders: ["stdout"], level: "INFO" }
}
});

t.test('should still get new levels', (assert) => {
t.test("should still get new levels", assert => {
// can't use .equal because by calling log4js.configure we create new instances
assert.same(loggers.sub1.level, log4js.levels.WARN);
assert.same(loggers.sub11.level, log4js.levels.TRACE);
@ -91,32 +89,35 @@ test('subcategories', (batch) => {
t.end();
});

batch.test('setting level on subcategories should not set parent level', (t) => {
log4js.configure({
appenders: { stdout: { type: 'stdout' } },
categories: {
default: { appenders: ['stdout'], level: 'trace' },
parent: { appenders: ['stdout'], level: 'error' }
}
});
batch.test(
"setting level on subcategories should not set parent level",
t => {
log4js.configure({
appenders: { stdout: { type: "stdout" } },
categories: {
default: { appenders: ["stdout"], level: "trace" },
parent: { appenders: ["stdout"], level: "error" }
}
});

const logger = log4js.getLogger('parent');
const subLogger = log4js.getLogger('parent.child');
const logger = log4js.getLogger("parent");
const subLogger = log4js.getLogger("parent.child");

t.test('should inherit parent level', (assert) => {
assert.same(subLogger.level, log4js.levels.ERROR);
assert.end();
});
t.test("should inherit parent level", assert => {
assert.same(subLogger.level, log4js.levels.ERROR);
assert.end();
});

t.test('changing child level should not change parent level', (assert) => {
subLogger.level = 'info';
assert.same(subLogger.level, log4js.levels.INFO);
assert.same(logger.level, log4js.levels.ERROR);
assert.end();
});
t.test("changing child level should not change parent level", assert => {
subLogger.level = "info";
assert.same(subLogger.level, log4js.levels.INFO);
assert.same(logger.level, log4js.levels.ERROR);
assert.end();
});

t.end();
});
t.end();
}
);

batch.end();
});

@ -1,51 +1,53 @@
const test = require('tap').test;
const net = require('net');
const log4js = require('../../lib/log4js');
const LoggingEvent = require('../../lib/LoggingEvent');
const { test } = require("tap");
const net = require("net");
const log4js = require("../../lib/log4js");
const LoggingEvent = require("../../lib/LoggingEvent");

const messages = [];
const server = net.createServer((socket) => {
socket.setEncoding('utf8');
socket.on('data', (data) => {
const server = net.createServer(socket => {
socket.setEncoding("utf8");
socket.on("data", data => {
data
.split('__LOG4JS__')
.split("__LOG4JS__")
.filter(s => s.length)
.forEach((s) => { messages.push(LoggingEvent.deserialise(s)); });
.forEach(s => {
messages.push(LoggingEvent.deserialise(s));
});
});
});

server.unref();

server.listen(() => {
const port = server.address().port;
const { port } = server.address();
log4js.configure({
appenders: {
tcp: { type: 'tcp', port: port }
tcp: { type: "tcp", port }
},
categories: {
default: { appenders: ['tcp'], level: 'debug' }
default: { appenders: ["tcp"], level: "debug" }
}
});

const logger = log4js.getLogger();
logger.info('This should be sent via TCP.');
logger.info('This should also be sent via TCP and not break things.');
logger.info("This should be sent via TCP.");
logger.info("This should also be sent via TCP and not break things.");
log4js.shutdown(() => {
server.close(() => {
test('TCP Appender', (batch) => {
batch.test('should send log messages as JSON over TCP', (t) => {
test("TCP Appender", batch => {
batch.test("should send log messages as JSON over TCP", t => {
t.equal(messages.length, 2);
t.match(messages[0], {
data: ['This should be sent via TCP.'],
categoryName: 'default',
data: ["This should be sent via TCP."],
categoryName: "default",
context: {},
level: { levelStr: 'INFO' }
level: { levelStr: "INFO" }
});
t.match(messages[1], {
data: ['This should also be sent via TCP and not break things.'],
categoryName: 'default',
data: ["This should also be sent via TCP and not break things."],
categoryName: "default",
context: {},
level: { levelStr: 'INFO' }
level: { levelStr: "INFO" }
});
t.end();
});
