Mirror of https://github.com/log4js-node/log4js-node.git (synced 2025-12-08 19:26:01 +00:00)
Merge branch 'master' into tcp-refactor
commit 6f4dfeaafc

2 .gitignore (vendored)
2 .gitignore (vendored)
@@ -21,3 +21,5 @@ coverage/
.nyc_output/
_site
Gemfile.lock
Dockerfile
docker-compose.yml
@@ -12,3 +12,7 @@ lib-cov
coverage.html
Makefile
coverage
Gemfile
Gemfile.lock
docker-compose.yml
Dockerfile
@@ -1,4 +1,4 @@
# log4js-node [](http://travis-ci.org/nomiddlename/log4js-node)
# log4js-node [](http://travis-ci.org/log4js-node/log4js-node)

[](https://nodei.co/npm/log4js/)
[](https://codecov.io/gh/nomiddlename/log4js-node)
@@ -4,6 +4,7 @@
// load modules
const log4js = require('log4js');
const express = require('express');

const app = express();

// config
@@ -25,17 +26,15 @@ const logger = log4js.getLogger('log4jslog');
// logger.setLevel('ERROR');

// express app
app.configure(() => {
app.use(express.favicon(''));
// app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO }));
// app.use(log4js.connectLogger(logger, { level: 'auto', format: ':method :url :status' }));
app.use(express.favicon(''));
// app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO }));
// app.use(log4js.connectLogger(logger, { level: 'auto', format: ':method :url :status' }));

// ### AUTO LEVEL DETECTION
// http responses 3xx, level = WARN
// http responses 4xx & 5xx, level = ERROR
// else.level = INFO
app.use(log4js.connectLogger(logger, { level: 'auto' }));
});
// ### AUTO LEVEL DETECTION
// http responses 3xx, level = WARN
// http responses 4xx & 5xx, level = ERROR
// else.level = INFO
app.use(log4js.connectLogger(logger, { level: 'auto' }));

// route
app.get('/', (req, res) => {
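A minimal, self-contained sketch (not part of this commit's diff) of the connectLogger usage shown above, assuming Express 4 style middleware rather than the old app.configure() block; with level: 'auto', 3xx responses log at WARN, 4xx/5xx at ERROR, everything else at INFO, as the comments above describe.

const log4js = require('log4js');
const express = require('express');

log4js.configure({
  appenders: { out: { type: 'stdout' } },
  categories: { default: { appenders: ['out'], level: 'info' } }
});

const app = express();
const httpLogger = log4js.getLogger('http');

// 'auto' maps the response status code to a log level
app.use(log4js.connectLogger(httpLogger, { level: 'auto' }));

app.get('/', (req, res) => res.send('ok'));
app.listen(3000);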
@@ -8,7 +8,7 @@ const debug = require('debug')('log4js:configuration');

let cluster;
try {
cluster = require('cluster'); // eslint-disable-line global-require
cluster = require('cluster'); // eslint-disable-line global-require
} catch (e) {
debug('Clustering support disabled because require(cluster) threw an error: ', e);
}
@@ -36,14 +36,12 @@ function anInteger(thing) {
}

class Configuration {

throwExceptionIf(checks, message) {
const tests = Array.isArray(checks) ? checks : [checks];
tests.forEach((test) => {
if (test) {
throw new Error(
`Problem with log4js configuration: (${util.inspect(this.candidate, { depth: 5 })}) - ${message}`
);
throw new Error(`Problem with log4js configuration: (${util.inspect(this.candidate, { depth: 5 })})` +
` - ${message}`);
}
});
}
@@ -22,7 +22,7 @@ function getUrl(req) {
}


/**
/**
* Adds custom {token, replacement} objects to defaults,
* overwriting the defaults if any tokens clash
*
@@ -37,8 +37,8 @@ function assembleTokens(req, res, customTokens) {
const a = array.concat();
for (let i = 0; i < a.length; ++i) {
for (let j = i + 1; j < a.length; ++j) {
// not === because token can be regexp object
/* eslint eqeqeq:0 */
// not === because token can be regexp object
/* eslint eqeqeq:0 */
if (a[i].token == a[j].token) {
a.splice(j--, 1);
}
@@ -91,15 +91,15 @@ function assembleTokens(req, res, customTokens) {
token: /:res\[([^\]]+)]/g,
replacement: function (_, field) {
return res._headers ?
(res._headers[field.toLowerCase()] || res.__headers[field])
: (res.__headers && res.__headers[field]);
(res._headers[field.toLowerCase()] || res.__headers[field])
: (res.__headers && res.__headers[field]);
}
});

return arrayUniqueTokens(customTokens.concat(defaultTokens));
}

/**
/**
* Return formatted log line.
*
* @param {String} str
@@ -114,7 +114,7 @@ function format(str, tokens) {
return str;
}

/**
/**
* Return RegExp Object about nolog
*
* @param {String|Array} nolog
@@ -154,7 +154,7 @@ function createNoLogCondition(nolog) {
}

if (Array.isArray(nolog)) {
// convert to strings
// convert to strings
const regexpsAsStrings = nolog.map(reg => (reg.source ? reg.source : reg));
regexp = new RegExp(regexpsAsStrings.join('|'));
}
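A sketch (not part of this commit's diff) of how createNoLogCondition above surfaces through the connect-logger options: the option name nolog is taken from the @param doc above and should be treated as an assumption for this version; an array of strings or RegExp objects is joined into a single RegExp with '|'.

app.use(log4js.connectLogger(httpLogger, {
  level: 'auto',
  // requests whose URL matches any of these patterns are not logged
  nolog: ['\\.gif$', '\\.png$', '\\.css$']
}));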
@@ -112,10 +112,10 @@ function timestampLevelAndCategory(loggingEvent, colour, timezoneOffset) {
*/
function basicLayout(loggingEvent, timezoneOffset) {
return timestampLevelAndCategory(
loggingEvent,
undefined,
timezoneOffset
) + formatLogData(loggingEvent.data);
loggingEvent,
undefined,
timezoneOffset
) + formatLogData(loggingEvent.data);
}

/**
@@ -124,10 +124,10 @@ function basicLayout(loggingEvent, timezoneOffset) {
*/
function colouredLayout(loggingEvent, timezoneOffset) {
return timestampLevelAndCategory(
loggingEvent,
loggingEvent.level.colour,
timezoneOffset
) + formatLogData(loggingEvent.data);
loggingEvent,
loggingEvent.level.colour,
timezoneOffset
) + formatLogData(loggingEvent.data);
}

function messagePassThroughLayout(loggingEvent) {
@@ -36,7 +36,6 @@ module.exports = function (customLevels) {
}
return this.level === otherLevel.level;
}

}

const defaultLevels = {
@@ -31,7 +31,7 @@ const layouts = require('./layouts');

let cluster;
try {
cluster = require('cluster'); // eslint-disable-line global-require
cluster = require('cluster'); // eslint-disable-line global-require
} catch (e) {
debug('Clustering support disabled because require(cluster) threw an error: ', e);
}
@@ -78,9 +78,8 @@ function setLevelForCategory(category, level) {
debug(`setLevelForCategory: found ${categoryConfig} for ${category}`);
if (!categoryConfig) {
const sourceCategoryConfig = configForCategory(category);
debug(
`setLevelForCategory: no config found for category, found ${sourceCategoryConfig} for parents of ${category}`
);
debug('setLevelForCategory: no config found for category, ' +
`found ${sourceCategoryConfig} for parents of ${category}`);
categoryConfig = { appenders: sourceCategoryConfig.appenders };
}
categoryConfig.level = level;
@@ -159,23 +158,22 @@ function configure(configurationFileOrObject) {
LoggingEvent = loggerModule.LoggingEvent;
module.exports.connectLogger = connectModule(config.levels).connectLogger;

// just in case configure is called after shutdown
process.removeListener('message', receiver);
cluster.removeListener('message', receiver);
if (config.disableClustering) {
debug('Not listening for cluster messages, because clustering disabled.');
} else {
} else if (isPM2Master()) {
// PM2 cluster support
// PM2 runs everything as workers - install pm2-intercom for this to work.
// we only want one of the app instances to write logs
if (isPM2Master()) {
debug('listening for PM2 broadcast messages');
process.removeListener('message', receiver);
process.on('message', receiver);
} else if (cluster.isMaster) {
debug('listening for cluster messages');
cluster.removeListener('message', receiver);
cluster.on('message', receiver);
} else {
debug('not listening for messages, because we are not a master process');
}
debug('listening for PM2 broadcast messages');
process.on('message', receiver);
} else if (cluster.isMaster) {
debug('listening for cluster messages');
cluster.on('message', receiver);
} else {
debug('not listening for messages, because we are not a master process');
}

enabled = true;
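A sketch (not part of this commit's diff) of the two configuration flags the branch above keys off; the flag names disableClustering and pm2 are taken from this diff, everything else is assumed.

log4js.configure({
  appenders: { out: { type: 'stdout' } },
  categories: { default: { appenders: ['out'], level: 'info' } },
  // disableClustering: true, // skip the cluster/PM2 wiring entirely
  pm2: true // treat this process as a PM2 worker; the pm2-intercom module is needed for broadcasts
});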
@@ -6,7 +6,7 @@ const debug = require('debug')('log4js:logger');

let cluster;
try {
cluster = require('cluster'); // eslint-disable-line global-require
cluster = require('cluster'); // eslint-disable-line global-require
} catch (e) {
debug('Clustering support disabled because require(cluster) threw an error: ', e);
}
7457 package-lock.json (generated)
File diff suppressed because it is too large
@@ -25,11 +25,10 @@
},
"scripts": {
"clean": "find test -type f ! -name '*.json' ! -name '*.js' ! -name '.eslintrc' -delete && rm *.log",
"lint": "eslint lib/ test/",
"prepush": "npm test",
"commitmsg": "validate-commit-msg",
"posttest": "npm run clean",
"pretest": "eslint lib/**/*",
"pretest": "eslint 'lib/**/*.js' 'test/**/*.js'",
"test": "tap 'test/tap/**/*.js'",
"coverage": "tap 'test/tap/**/*.js' --cov",
"codecov": "tap 'test/tap/**/*.js' --cov --coverage-report=lcov && codecov"
@@ -39,10 +38,10 @@
"lib": "lib"
},
"dependencies": {
"date-format": "^1.1.0",
"debug": "^2.6.8",
"date-format": "^1.2.0",
"debug": "^3.1.0",
"semver": "^5.3.0",
"streamroller": "^0.6.0"
"streamroller": "^0.7.0"
},
"devDependencies": {
"codecov": "^3.0.0",
@@ -24,9 +24,8 @@ function testAppender(label) {
test('log4js configuration validation', (batch) => {
batch.test('should give error if config is just plain silly', (t) => {
[null, undefined, '', ' ', []].forEach((config) => {
const expectedError = new Error(
`Problem with log4js configuration: (${util.inspect(config)}) - must be an object.`
);
const expectedError =
new Error(`Problem with log4js configuration: (${util.inspect(config)}) - must be an object.`);
t.throws(
() => new Configuration(config),
expectedError
@@ -37,34 +36,32 @@ test('log4js configuration validation', (batch) => {
});

batch.test('should give error if config is an empty object', (t) => {
const expectedError = new Error(
'Problem with log4js configuration: ({}) - must have a property "appenders" of type object.'
);
const expectedError =
new Error('Problem with log4js configuration: ({}) - must have a property "appenders" of type object.');
t.throws(() => new Configuration({}), expectedError);
t.end();
});

batch.test('should give error if config has no appenders', (t) => {
const expectedError = new Error(
'Problem with log4js configuration: ({ categories: {} }) - must have a property "appenders" of type object.'
);
const expectedError =
new Error('Problem with log4js configuration: ({ categories: {} }) ' +
'- must have a property "appenders" of type object.');
t.throws(() => new Configuration({ categories: {} }), expectedError);
t.end();
});

batch.test('should give error if config has no categories', (t) => {
const expectedError = new Error(
'Problem with log4js configuration: ({ appenders: {} }) - must have a property "categories" of type object.'
);
const expectedError =
new Error('Problem with log4js configuration: ({ appenders: {} }) ' +
'- must have a property "categories" of type object.');
t.throws(() => new Configuration({ appenders: {} }), expectedError);
t.end();
});

batch.test('should give error if appenders is not an object', (t) => {
const error = new Error(
'Problem with log4js configuration: ({ appenders: [], categories: [] })' +
' - must have a property "appenders" of type object.'
);
const error =
new Error('Problem with log4js configuration: ({ appenders: [], categories: [] })' +
' - must have a property "appenders" of type object.');
t.throws(
() => new Configuration({ appenders: [], categories: [] }),
error
@@ -73,10 +70,9 @@ test('log4js configuration validation', (batch) => {
});

batch.test('should give error if appenders are not all valid', (t) => {
const error = new Error(
'Problem with log4js configuration: ({ appenders: { thing: \'cheese\' }, categories: {} })' +
' - appender "thing" is not valid (must be an object with property "type")'
);
const error =
new Error('Problem with log4js configuration: ({ appenders: { thing: \'cheese\' }, categories: {} })' +
' - appender "thing" is not valid (must be an object with property "type")');
t.throws(
() => new Configuration({ appenders: { thing: 'cheese' }, categories: {} }),
error
@@ -85,10 +81,8 @@ test('log4js configuration validation', (batch) => {
});

batch.test('should require at least one appender', (t) => {
const error = new Error(
'Problem with log4js configuration: ({ appenders: {}, categories: {} })' +
' - must define at least one appender.'
);
const error = new Error('Problem with log4js configuration: ({ appenders: {}, categories: {} })' +
' - must define at least one appender.');
t.throws(
() => new Configuration({ appenders: {}, categories: {} }),
error
@@ -97,11 +91,9 @@ test('log4js configuration validation', (batch) => {
});

batch.test('should give error if categories are not all valid', (t) => {
const error = new Error(
'Problem with log4js configuration: ' +
const error = new Error('Problem with log4js configuration: ' +
'({ appenders: { stdout: { type: \'stdout\' } },\n categories: { thing: \'cheese\' } })' +
' - category "thing" is not valid (must be an object with properties "appenders" and "level")'
);
' - category "thing" is not valid (must be an object with properties "appenders" and "level")');
t.throws(
() => new Configuration({ appenders: { stdout: { type: 'stdout' } }, categories: { thing: 'cheese' } }),
error
@@ -110,27 +102,24 @@ test('log4js configuration validation', (batch) => {
});

batch.test('should give error if default category not defined', (t) => {
const error = new Error(
'Problem with log4js configuration: ' +
const error = new Error('Problem with log4js configuration: ' +
'({ appenders: { stdout: { type: \'stdout\' } },\n' +
' categories: { thing: { appenders: [ \'stdout\' ], level: \'ERROR\' } } })' +
' - must define a "default" category.'
);
' - must define a "default" category.');
t.throws(
() => new Configuration({
appenders: { stdout: { type: 'stdout' } },
categories: { thing: { appenders: ['stdout'], level: 'ERROR' } } }
),
categories: { thing: { appenders: ['stdout'], level: 'ERROR' } }
}),
error
);
t.end();
});

batch.test('should require at least one category', (t) => {
const error = new Error(
'Problem with log4js configuration: ({ appenders: { stdout: { type: \'stdout\' } }, categories: {} })' +
' - must define at least one category.'
);
const error =
new Error('Problem with log4js configuration: ({ appenders: { stdout: { type: \'stdout\' } }, categories: {} })' +
' - must define at least one category.');
t.throws(
() => new Configuration({ appenders: { stdout: { type: 'stdout' } }, categories: {} }),
error
@@ -139,12 +128,10 @@ test('log4js configuration validation', (batch) => {
});

batch.test('should give error if category.appenders is not an array', (t) => {
const error = new Error(
'Problem with log4js configuration: ' +
const error = new Error('Problem with log4js configuration: ' +
'({ appenders: { stdout: { type: \'stdout\' } },\n' +
' categories: { thing: { appenders: {}, level: \'ERROR\' } } })' +
' - category "thing" is not valid (appenders must be an array of appender names)'
);
' - category "thing" is not valid (appenders must be an array of appender names)');
t.throws(
() => new Configuration({
appenders: { stdout: { type: 'stdout' } },
@@ -156,12 +143,10 @@ test('log4js configuration validation', (batch) => {
});

batch.test('should give error if category.appenders is empty', (t) => {
const error = new Error(
'Problem with log4js configuration: ' +
const error = new Error('Problem with log4js configuration: ' +
'({ appenders: { stdout: { type: \'stdout\' } },\n' +
' categories: { thing: { appenders: [], level: \'ERROR\' } } })' +
' - category "thing" is not valid (appenders must contain at least one appender name)'
);
' - category "thing" is not valid (appenders must contain at least one appender name)');
t.throws(
() => new Configuration({
appenders: { stdout: { type: 'stdout' } },
@@ -173,12 +158,10 @@ test('log4js configuration validation', (batch) => {
});

batch.test('should give error if categories do not refer to valid appenders', (t) => {
const error = new Error(
'Problem with log4js configuration: ' +
const error = new Error('Problem with log4js configuration: ' +
'({ appenders: { stdout: { type: \'stdout\' } },\n' +
' categories: { thing: { appenders: [ \'cheese\' ], level: \'ERROR\' } } })' +
' - category "thing" is not valid (appender "cheese" is not defined)'
);
' - category "thing" is not valid (appender "cheese" is not defined)');
t.throws(
() => new Configuration({
appenders: { stdout: { type: 'stdout' } },
@@ -190,13 +173,11 @@ test('log4js configuration validation', (batch) => {
});

batch.test('should give error if category level is not valid', (t) => {
const error = new Error(
'Problem with log4js configuration: ' +
const error = new Error('Problem with log4js configuration: ' +
'({ appenders: { stdout: { type: \'stdout\' } },\n' +
' categories: { default: { appenders: [ \'stdout\' ], level: \'Biscuits\' } } })' +
' - category "default" is not valid (level "Biscuits" not recognised; ' +
'valid levels are ALL, TRACE, DEBUG, INFO, WARN, ERROR, FATAL, MARK, OFF)'
);
'valid levels are ALL, TRACE, DEBUG, INFO, WARN, ERROR, FATAL, MARK, OFF)');
t.throws(
() => new Configuration({
appenders: { stdout: { type: 'stdout' } },
@@ -208,12 +189,10 @@ test('log4js configuration validation', (batch) => {
});

batch.test('should give error if appender type cannot be found', (t) => {
const error = new Error(
'Problem with log4js configuration: ' +
const error = new Error('Problem with log4js configuration: ' +
'({ appenders: { thing: { type: \'cheese\' } },\n' +
' categories: { default: { appenders: [ \'thing\' ], level: \'ERROR\' } } })' +
' - appender "thing" is not valid (type "cheese" could not be found)'
);
' - appender "thing" is not valid (type "cheese" could not be found)');
t.throws(
() => new Configuration({
appenders: { thing: { type: 'cheese' } },
@@ -278,9 +257,7 @@ test('log4js configuration validation', (batch) => {
sandboxConfig.requires[
`${path.join(mainPath, '../../node_modules/tap/node_modules/nyc/bin/cheese')}`
] = testAppender('correct');
const SandboxedConfiguration = sandbox.require(
'../../lib/configuration', sandboxConfig
);
const SandboxedConfiguration = sandbox.require('../../lib/configuration', sandboxConfig);

const config = new SandboxedConfiguration({
appenders: { thing: { type: 'cheese' } },
@@ -32,7 +32,7 @@ test('../../lib/appenders/dateFile', (batch) => {
t.include(contents, 'This should be in the file');
t.match(
contents,
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
@@ -55,13 +55,15 @@ test('../../lib/appenders/dateFile', (batch) => {
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');

t.teardown(() => { removeFile('date-file-test.log'); });

fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', (err, contents) => {
t.include(contents, `this should be written to the file${EOL}`);
t.equal(contents.indexOf('this should not be written to the file'), -1);
t.end();
log4js.shutdown(() => {
fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', (err, contents) => {
t.include(contents, `this should be written to the file${EOL}`);
t.equal(contents.indexOf('this should not be written to the file'), -1);
t.end();
});
});

t.teardown(() => { removeFile('date-file-test.log'); });
});

batch.test('configure with options.alwaysIncludePattern', (t) => {
@@ -96,13 +98,13 @@ test('../../lib/appenders/dateFile', (batch) => {
t.teardown(() => { removeFile(`date-file-test${thisTime}`); });

// wait for filesystem to catch up
setTimeout(() => {
log4js.shutdown(() => {
fs.readFile(path.join(__dirname, `date-file-test${thisTime}`), 'utf8', (err, contents) => {
t.include(contents, 'this should be written to the file with the appended date');
t.include(contents, 'this is existing data', 'should not overwrite the file on open (issue #132)');
t.end();
});
}, 100);
});
});

batch.test('should flush logs on shutdown', (t) => {
@@ -124,7 +126,7 @@ test('../../lib/appenders/dateFile', (batch) => {
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
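A sketch (not part of this commit's diff) of the pattern the updated tests adopt above: read the log file only from inside the log4js.shutdown() callback, so buffered writes have been flushed first. The file name is reused from the test; fs and path are assumed to be required as in the test file.

logger.warn('this should be written to the file');
log4js.shutdown(() => {
  // by the time the callback runs, the appender has flushed and closed its stream
  const contents = fs.readFileSync(path.join(__dirname, 'date-file-test.log'), 'utf8');
  console.log(contents.includes('this should be written to the file')); // true
});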
@@ -36,7 +36,7 @@ test('log4js fileAppender', (batch) => {
t.include(fileContents, `This should be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
@@ -63,7 +63,7 @@ test('log4js fileAppender', (batch) => {
t.equal(fileContents.split(EOL).length, 4);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
@@ -84,9 +84,13 @@ test('log4js fileAppender', (batch) => {
// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: {
file: { type: 'file', filename: testFile, maxLogSize: 100, backups: 0 }
file: {
type: 'file', filename: testFile, maxLogSize: 100, backups: 0
}
},
categories: { default: { appenders: ['file'], level: 'debug' } }
categories: {
default: { appenders: ['file'], level: 'debug' }
}
});

logger.info('This is the first log message.');
@@ -98,9 +102,7 @@ test('log4js fileAppender', (batch) => {
t.include(fileContents, 'This is the second log message.');
t.equal(fileContents.indexOf('This is the first log message.'), -1);
fs.readdir(__dirname, (e, files) => {
const logFiles = files.filter(
file => file.includes('fa-maxFileSize-test.log')
);
const logFiles = files.filter(file => file.includes('fa-maxFileSize-test.log'));
t.equal(logFiles.length, 2, 'should be 2 files');
t.end();
});
@@ -124,7 +126,9 @@ test('log4js fileAppender', (batch) => {
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
file: { type: 'file', filename: testFile, maxLogSize: 50, backups: 2 }
file: {
type: 'file', filename: testFile, maxLogSize: 50, backups: 2
}
},
categories: { default: { appenders: ['file'], level: 'debug' } }
});
@@ -136,9 +140,7 @@ test('log4js fileAppender', (batch) => {
// give the system a chance to open the stream
setTimeout(() => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.sort().filter(
file => file.includes('fa-maxFileSize-with-backups-test.log')
);
const logFiles = files.sort().filter(file => file.includes('fa-maxFileSize-with-backups-test.log'));
t.equal(logFiles.length, 3);
t.same(logFiles, [
'fa-maxFileSize-with-backups-test.log',
@@ -184,7 +186,9 @@ test('log4js fileAppender', (batch) => {
// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: {
file: { type: 'file', filename: testFile, maxLogSize: 50, backups: 2, compress: true }
file: {
type: 'file', filename: testFile, maxLogSize: 50, backups: 2, compress: true
}
},
categories: { default: { appenders: ['file'], level: 'debug' } }
});
@@ -195,9 +199,7 @@ test('log4js fileAppender', (batch) => {
// give the system a chance to open the stream
setTimeout(() => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.sort().filter(
file => file.includes('fa-maxFileSize-with-backups-compressed-test.log')
);
const logFiles = files.sort().filter(file => file.includes('fa-maxFileSize-with-backups-compressed-test.log'));
t.equal(logFiles.length, 3, 'should be 3 files');
t.same(logFiles, [
'fa-maxFileSize-with-backups-compressed-test.log',
@@ -35,7 +35,7 @@ test('log4js fileSyncAppender', (batch) => {
t.include(fileContents, `This should be in the file.${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
t.end();
});
@@ -55,7 +55,11 @@ test('log4js fileSyncAppender', (batch) => {

// log file of 100 bytes maximum, no backups
log4js.configure({
appenders: { sync: { type: 'fileSync', filename: testFile, maxLogSize: 100, backups: 0 } },
appenders: {
sync: {
type: 'fileSync', filename: testFile, maxLogSize: 100, backups: 0
}
},
categories: { default: { appenders: ['sync'], level: 'debug' } }
});
logger.info('This is the first log message.');
@@ -72,9 +76,7 @@ test('log4js fileSyncAppender', (batch) => {

t.test('there should be two test files', (assert) => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter(
file => file.includes('fa-maxFileSize-sync-test.log')
);
const logFiles = files.filter(file => file.includes('fa-maxFileSize-sync-test.log'));
assert.equal(logFiles.length, 2);
assert.end();
});
@@ -97,7 +99,11 @@ test('log4js fileSyncAppender', (batch) => {

// log file of 50 bytes maximum, 2 backups
log4js.configure({
appenders: { sync: { type: 'fileSync', filename: testFile, maxLogSize: 50, backups: 2 } },
appenders: {
sync: {
type: 'fileSync', filename: testFile, maxLogSize: 50, backups: 2
}
},
categories: { default: { appenders: ['sync'], level: 'debug' } }
});
logger.info('This is the first log message.');
@@ -108,9 +114,7 @@ test('log4js fileSyncAppender', (batch) => {
t.test('the log files', (assert) => {
assert.plan(5);
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter(
file => file.includes('fa-maxFileSize-with-backups-sync-test.log')
);
const logFiles = files.filter(file => file.includes('fa-maxFileSize-with-backups-sync-test.log'));
assert.equal(logFiles.length, 3, 'should be 3 files');
assert.same(logFiles, [
'fa-maxFileSize-with-backups-sync-test.log',
@@ -136,11 +140,12 @@ test('log4js fileSyncAppender', (batch) => {
// this config defines one file appender (to ./tmp-sync-tests.log)
// and sets the log level for "tests" to WARN
log4js.configure({
appenders: { sync: {
type: 'fileSync',
filename: 'tmp-sync-tests.log',
layout: { type: 'messagePassThrough' }
}
appenders: {
sync: {
type: 'fileSync',
filename: 'tmp-sync-tests.log',
layout: { type: 'messagePassThrough' }
}
},
categories: {
default: { appenders: ['sync'], level: 'debug' },

@@ -76,7 +76,9 @@ const setupLogging = function (options, category, compressedLength) {
exitHandler = handler;
}
},
env: {}
removeListener: () => {},
env: {},
stderr: process.stderr
},
console: fakeConsole
}
@@ -30,7 +30,7 @@ test('log4js layouts', (batch) => {

assert.equal(
output,
'\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mnonsense'
'\x1B[31m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mnonsense'
);
assert.end();
});
@@ -47,7 +47,7 @@ test('log4js layouts', (batch) => {
colour: 'red'
}
});
assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mthing 2');
assert.equal(output, '\x1B[31m[2010-12-05T14:18:30.045] [ERROR] cheese - \x1B[39mthing 2');
assert.end();
});
t.end();
@@ -151,7 +151,7 @@ test('log4js layouts', (batch) => {
}
};

t.equal(layout(event), '[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test');
t.equal(layout(event), '[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test');

t.test('should output a stacktrace, message if the event has an error attached', (assert) => {
let i;
@@ -166,7 +166,7 @@ test('log4js layouts', (batch) => {
assert.equal(lines.length, stack.length);
assert.equal(
lines[0],
'[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error'
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error'
);
for (i = 1; i < stack.length; i++) {
assert.equal(lines[i], stack[i]);
@@ -175,7 +175,7 @@ test('log4js layouts', (batch) => {
assert.equal(lines.length - 1, stack.length);
assert.equal(
lines[0],
'[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]'
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]'
);
for (i = 1; i < stack.length; i++) {
assert.equal(lines[i + 2], stack[i + 1]);
@@ -194,7 +194,7 @@ test('log4js layouts', (batch) => {
const output = layout(event);
assert.equal(
output,
'[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test ' +
'[2010-12-05T14:18:30.045] [DEBUG] tests - this is a test ' +
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
);
assert.end();
@@ -287,13 +287,13 @@ test('log4js layouts', (batch) => {
});

t.test('%d should output the date in ISO8601 format', (assert) => {
testPattern(assert, layout, event, tokens, '%d', '2010-12-05 14:18:30.045');
testPattern(assert, layout, event, tokens, '%d', '2010-12-05T14:18:30.045');
assert.end();
});

t.test('%d should allow for format specification', (assert) => {
testPattern(assert, layout, event, tokens, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30.045-0000');
testPattern(assert, layout, event, tokens, '%d{ISO8601}', '2010-12-05 14:18:30.045');
testPattern(assert, layout, event, tokens, '%d{ISO8601}', '2010-12-05T14:18:30.045');
testPattern(assert, layout, event, tokens, '%d{ABSOLUTE}', '14:18:30.045');
testPattern(assert, layout, event, tokens, '%d{DATE}', '05 12 2010 14:18:30.045');
testPattern(assert, layout, event, tokens, '%d{yy MM dd hh mm ss}', '10 12 05 14 18 30');
@@ -318,7 +318,8 @@ test('log4js layouts', (batch) => {
});

t.test('should handle complicated patterns', (assert) => {
testPattern(assert, layout, event, tokens,
testPattern(
assert, layout, event, tokens,
'%m%n %c{2} at %d{ABSOLUTE} cheese %p%n',
`this is a test${EOL} of.tests at 14:18:30.045 cheese DEBUG${EOL}`
);
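The expectations above change because the default %d token now renders ISO8601 with a 'T' separator. A sketch (not part of this commit's diff) of a pattern layout using the same tokens these tests exercise; the appender name is assumed.

log4js.configure({
  appenders: {
    out: {
      type: 'stdout',
      // %d date, %p level, %c category, %m message
      layout: { type: 'pattern', pattern: '%d{ISO8601} [%p] %c - %m' }
    }
  },
  categories: { default: { appenders: ['out'], level: 'debug' } }
});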
@@ -49,126 +49,108 @@ test('levels', (batch) => {

t.test('ALL', (assert) => {
const all = levels.ALL;
assertThat(assert, all).isLessThanOrEqualTo(
[
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]
);
assertThat(assert, all).isNotGreaterThanOrEqualTo(
[
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]
);
assertThat(assert, all).isLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]);
assertThat(assert, all).isNotGreaterThanOrEqualTo([
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]);
assertThat(assert, all).isEqualTo([levels.getLevel('ALL')]);
assertThat(assert, all).isNotEqualTo(
[
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]
);
assertThat(assert, all).isNotEqualTo([
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]);
assert.end();
});

t.test('TRACE', (assert) => {
const trace = levels.TRACE;
assertThat(assert, trace).isLessThanOrEqualTo(
[
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]
);
assertThat(assert, trace).isLessThanOrEqualTo([
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]);
assertThat(assert, trace).isNotLessThanOrEqualTo([levels.ALL]);
assertThat(assert, trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
assertThat(assert, trace).isNotGreaterThanOrEqualTo(
[
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]
);
assertThat(assert, trace).isNotGreaterThanOrEqualTo([
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]);
assertThat(assert, trace).isEqualTo([levels.getLevel('TRACE')]);
assertThat(assert, trace).isNotEqualTo(
[
levels.ALL,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]
);
assertThat(assert, trace).isNotEqualTo([
levels.ALL,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]);
assert.end();
});

t.test('DEBUG', (assert) => {
const debug = levels.DEBUG;
assertThat(assert, debug).isLessThanOrEqualTo(
[
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]
);
assertThat(assert, debug).isLessThanOrEqualTo([
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]);
assertThat(assert, debug).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE]);
assertThat(assert, debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
assertThat(assert, debug).isNotGreaterThanOrEqualTo(
[
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]
);
assertThat(assert, debug).isNotGreaterThanOrEqualTo([
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]);
assertThat(assert, debug).isEqualTo([levels.getLevel('DEBUG')]);
assertThat(assert, debug).isNotEqualTo(
[
levels.ALL,
levels.TRACE,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]
);
assertThat(assert, debug).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.MARK,
levels.OFF
]);
assert.end();
});
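The assertThat helpers above appear to wrap the comparison methods on Level objects. A sketch (not part of this commit's diff) of calling them directly; that levels is reachable via require('log4js').levels is an assumption about this version.

const { levels } = require('log4js'); // assumed export
console.log(levels.DEBUG.isLessThanOrEqualTo(levels.INFO)); // true
console.log(levels.ERROR.isGreaterThanOrEqualTo(levels.WARN)); // true
console.log(levels.TRACE.isEqualTo(levels.getLevel('TRACE'))); // true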
@@ -138,13 +138,13 @@ test('logstashUDP appender', (batch) => {
pattern: '%m'
}
});
setup.logger.log('trace', 'Log event #1');
setup.logger.log('trace', 'Log event #1');

const json = JSON.parse(setup.results.buffer.toString());
t.equal(json.fields.field1, 'value1');
t.equal(json.fields.field2, 'evaluated at runtime' );
const json = JSON.parse(setup.results.buffer.toString());
t.equal(json.fields.field1, 'value1');
t.equal(json.fields.field2, 'evaluated at runtime');

t.end();
t.end();
});

batch.test('extra fields should be added to the fields structure', (t) => {
@@ -7,7 +7,11 @@ const fs = require('fs');
test('multiFile appender', (batch) => {
batch.test('should write to multiple files based on the loggingEvent property', (t) => {
log4js.configure({
appenders: { multi: { type: 'multiFile', base: 'logs/', property: 'categoryName', extension: '.log' } },
appenders: {
multi: {
type: 'multiFile', base: 'logs/', property: 'categoryName', extension: '.log'
}
},
categories: { default: { appenders: ['multi'], level: 'info' } }
});
const loggerA = log4js.getLogger('A');
@@ -23,7 +27,11 @@ test('multiFile appender', (batch) => {

batch.test('should write to multiple files based on loggingEvent.context properties', (t) => {
log4js.configure({
appenders: { multi: { type: 'multiFile', base: 'logs/', property: 'label', extension: '.log' } },
appenders: {
multi: {
type: 'multiFile', base: 'logs/', property: 'label', extension: '.log'
}
},
categories: { default: { appenders: ['multi'], level: 'info' } }
});
const loggerC = log4js.getLogger('cheese');
@@ -41,7 +49,11 @@ test('multiFile appender', (batch) => {

batch.test('should fail silently if loggingEvent property has no value', (t) => {
log4js.configure({
appenders: { multi: { type: 'multiFile', base: 'logs/', property: 'label', extension: '.log' } },
appenders: {
multi: {
type: 'multiFile', base: 'logs/', property: 'label', extension: '.log'
}
},
categories: { default: { appenders: ['multi'], level: 'info' } }
});
const loggerE = log4js.getLogger();
@@ -62,14 +74,16 @@ test('multiFile appender', (batch) => {

batch.test('should pass options to rolling file stream', (t) => {
log4js.configure({
appenders: { multi: {
type: 'multiFile',
base: 'logs/',
property: 'label',
extension: '.log',
maxLogSize: 61,
backups: 2
} },
appenders: {
multi: {
type: 'multiFile',
base: 'logs/',
property: 'label',
extension: '.log',
maxLogSize: 61,
backups: 2
}
},
categories: { default: { appenders: ['multi'], level: 'info' } }
});
const loggerF = log4js.getLogger();
@@ -68,7 +68,11 @@ test('Multiprocess Appender', (batch) => {
}
);
log4js.configure({
appenders: { worker: { type: 'multiprocess', mode: 'worker', loggerPort: 1234, loggerHost: 'pants' } },
appenders: {
worker: {
type: 'multiprocess', mode: 'worker', loggerPort: 1234, loggerHost: 'pants'
}
},
categories: { default: { appenders: ['worker'], level: 'trace' } }
});

@@ -233,32 +237,22 @@ test('Multiprocess Appender', (batch) => {
});

t.test('when a client connects', (assert) => {
const logString = `${JSON.stringify(
{
level: { level: 10000, levelStr: 'DEBUG' },
data: ['some debug']
}
)}__LOG4JS__`;
const logString = `${JSON.stringify({
level: { level: 10000, levelStr: 'DEBUG' },
data: ['some debug']
})}__LOG4JS__`;

net.cbs.data(
`${JSON.stringify(
{
level: { level: 40000, levelStr: 'ERROR' },
data: ['an error message']
}
)}__LOG4JS__`
);
net.cbs.data(`${JSON.stringify({
level: { level: 40000, levelStr: 'ERROR' },
data: ['an error message']
})}__LOG4JS__`);
net.cbs.data(logString.substring(0, 10));
net.cbs.data(logString.substring(10));
net.cbs.data(logString + logString + logString);
net.cbs.end(
`${JSON.stringify(
{
level: { level: 50000, levelStr: 'FATAL' },
data: ["that's all folks"]
}
)}__LOG4JS__`
);
net.cbs.end(`${JSON.stringify({
level: { level: 50000, levelStr: 'FATAL' },
data: ["that's all folks"]
})}__LOG4JS__`);
net.cbs.data('bad message__LOG4JS__');

const logEvents = recording.replay();
@@ -304,11 +298,12 @@ test('Multiprocess Appender', (batch) => {
}
}
);
t.throws(() =>
log4js.configure({
appenders: { master: { type: 'multiprocess', mode: 'master' } },
categories: { default: { appenders: ['master'], level: 'trace' } }
}),
t.throws(
() =>
log4js.configure({
appenders: { master: { type: 'multiprocess', mode: 'master' } },
categories: { default: { appenders: ['master'], level: 'trace' } }
}),
new Error('multiprocess master must have an "appender" defined')
);
t.end();
@@ -325,11 +320,12 @@ test('Multiprocess Appender', (batch) => {
}
}
);
t.throws(() =>
log4js.configure({
appenders: { master: { type: 'multiprocess', mode: 'master', appender: 'cheese' } },
categories: { default: { appenders: ['master'], level: 'trace' } }
}),
t.throws(
() =>
log4js.configure({
appenders: { master: { type: 'multiprocess', mode: 'master', appender: 'cheese' } },
categories: { default: { appenders: ['master'], level: 'trace' } }
}),
new Error('multiprocess master appender "cheese" not defined')
);
t.end();
@@ -118,12 +118,10 @@ test('../../lib/logger', (batch) => {
appenders: { stdout: { type: 'stdout' } },
categories: { default: { appenders: ['stdout'], level: 'trace' } }
});
}, new Error(
'Problem with log4js configuration: ' +
}, new Error('Problem with log4js configuration: ' +
"({ levels: { cheese: { value: 'biscuits' } },\n appenders: { stdout: { type: 'stdout' } },\n" +
" categories: { default: { appenders: [ 'stdout' ], level: 'trace' } } }) - " +
'level "cheese".value must have an integer value'
));
'level "cheese".value must have an integer value'));

t.throws(() => {
log4js.configure({
@@ -133,12 +131,10 @@ test('../../lib/logger', (batch) => {
appenders: { stdout: { type: 'stdout' } },
categories: { default: { appenders: ['stdout'], level: 'trace' } }
});
}, new Error(
'Problem with log4js configuration: ' +
}, new Error('Problem with log4js configuration: ' +
"({ levels: { cheese: 'biscuits' },\n appenders: { stdout: { type: 'stdout' } },\n" +
" categories: { default: { appenders: [ 'stdout' ], level: 'trace' } } }) - " +
'level "cheese" must be an object'
));
'level "cheese" must be an object'));

t.throws(() => {
log4js.configure({
@@ -148,12 +144,10 @@ test('../../lib/logger', (batch) => {
appenders: { stdout: { type: 'stdout' } },
categories: { default: { appenders: ['stdout'], level: 'trace' } }
});
}, new Error(
'Problem with log4js configuration: ' +
}, new Error('Problem with log4js configuration: ' +
"({ levels: { cheese: { thing: 'biscuits' } },\n appenders: { stdout: { type: 'stdout' } },\n" +
" categories: { default: { appenders: [ 'stdout' ], level: 'trace' } } }) - " +
'level "cheese" must have a \'value\' property'
));
'level "cheese" must have a \'value\' property'));

t.throws(() => {
log4js.configure({
@@ -163,12 +157,10 @@ test('../../lib/logger', (batch) => {
appenders: { stdout: { type: 'stdout' } },
categories: { default: { appenders: ['stdout'], level: 'trace' } }
});
}, new Error(
'Problem with log4js configuration: ' +
}, new Error('Problem with log4js configuration: ' +
"({ levels: { cheese: { value: 3 } },\n appenders: { stdout: { type: 'stdout' } },\n" +
" categories: { default: { appenders: [ 'stdout' ], level: 'trace' } } }) - " +
'level "cheese" must have a \'colour\' property'
));
'level "cheese" must have a \'colour\' property'));

t.throws(() => {
log4js.configure({
@@ -178,12 +170,10 @@ test('../../lib/logger', (batch) => {
appenders: { stdout: { type: 'stdout' } },
categories: { default: { appenders: ['stdout'], level: 'trace' } }
});
}, new Error(
'Problem with log4js configuration: ' +
}, new Error('Problem with log4js configuration: ' +
"({ levels: { cheese: { value: 3, colour: 'pants' } },\n appenders: { stdout: { type: 'stdout' } },\n" +
" categories: { default: { appenders: [ 'stdout' ], level: 'trace' } } }) - " +
'level "cheese".colour must be one of white, grey, black, blue, cyan, green, magenta, red, yellow'
));
'level "cheese".colour must be one of white, grey, black, blue, cyan, green, magenta, red, yellow'));

t.throws(() => {
log4js.configure({
@@ -193,12 +183,10 @@ test('../../lib/logger', (batch) => {
appenders: { stdout: { type: 'stdout' } },
categories: { default: { appenders: ['stdout'], level: 'trace' } }
});
}, new Error(
'Problem with log4js configuration: ' +
}, new Error('Problem with log4js configuration: ' +
"({ levels: { '#pants': 3 },\n appenders: { stdout: { type: 'stdout' } },\n" +
" categories: { default: { appenders: [ 'stdout' ], level: 'trace' } } }) - " +
'level name "#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)'
));
'level name "#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)'));

t.throws(() => {
log4js.configure({
@@ -208,12 +196,10 @@ test('../../lib/logger', (batch) => {
appenders: { stdout: { type: 'stdout' } },
categories: { default: { appenders: ['stdout'], level: 'trace' } }
});
}, new Error(
'Problem with log4js configuration: ' +
}, new Error('Problem with log4js configuration: ' +
"({ levels: { 'thing#pants': 3 },\n appenders: { stdout: { type: 'stdout' } },\n" +
" categories: { default: { appenders: [ 'stdout' ], level: 'trace' } } }) - " +
'level name "thing#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)'
));
'level name "thing#pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)'));

t.throws(() => {
log4js.configure({
@@ -223,12 +209,10 @@ test('../../lib/logger', (batch) => {
appenders: { stdout: { type: 'stdout' } },
categories: { default: { appenders: ['stdout'], level: 'trace' } }
});
}, new Error(
'Problem with log4js configuration: ' +
}, new Error('Problem with log4js configuration: ' +
"({ levels: { '1pants': 3 },\n appenders: { stdout: { type: 'stdout' } },\n" +
" categories: { default: { appenders: [ 'stdout' ], level: 'trace' } } }) - " +
'level name "1pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)'
));
'level name "1pants" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)'));

t.throws(() => {
log4js.configure({
@@ -238,12 +222,10 @@ test('../../lib/logger', (batch) => {
appenders: { stdout: { type: 'stdout' } },
categories: { default: { appenders: ['stdout'], level: 'trace' } }
});
}, new Error(
'Problem with log4js configuration: ' +
}, new Error('Problem with log4js configuration: ' +
"({ levels: { '2': 3 },\n appenders: { stdout: { type: 'stdout' } },\n" +
" categories: { default: { appenders: [ 'stdout' ], level: 'trace' } } }) - " +
'level name "2" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)'
));
'level name "2" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)'));

t.throws(() => {
log4js.configure({
@@ -253,12 +235,10 @@ test('../../lib/logger', (batch) => {
appenders: { stdout: { type: 'stdout' } },
categories: { default: { appenders: ['stdout'], level: 'trace' } }
});
}, new Error(
'Problem with log4js configuration: ' +
}, new Error('Problem with log4js configuration: ' +
"({ levels: { 'cheese!': 3 },\n appenders: { stdout: { type: 'stdout' } },\n" +
" categories: { default: { appenders: [ 'stdout' ], level: 'trace' } } }) - " +
'level name "cheese!" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)'
));
'level name "cheese!" is not a valid identifier (must start with a letter, only contain A-Z,a-z,0-9,_)'));

t.end();
});
@@ -2,6 +2,7 @@

const test = require('tap').test;
const cluster = require('cluster');
const debug = require('debug')('log4js:pm2-test');

// PM2 runs everything as workers
// - no master in the cluster (PM2 acts as master itself)
@@ -14,46 +15,54 @@ if (cluster.isMaster) {
cluster.fork({ NODE_APP_INSTANCE: i });
});

cluster.on('message', (worker, msg) => {
const messageHandler = (worker, msg) => {
if (worker.type || worker.topic) {
msg = worker;
}
if (msg.type === 'testing') {
debug(`Received testing message from ${msg.instance} with events ${msg.events}`);
appEvents[msg.instance] = msg.events;
}

// we have to do the re-broadcasting that the pm2-intercom module would do.
if (msg.topic === 'log4js:message') {
debug(`Received log message ${msg}`);
for (const id in cluster.workers) {
cluster.workers[id].send(msg);
}
}
});
};

cluster.on('message', messageHandler);

let count = 0;
cluster.on('exit', () => {
count += 1;
if (count === 2) {
test('PM2 Support', (batch) => {
batch.test('should not get any events when turned off', (t) => {
t.notOk(appEvents['0'].filter(e => e && e.data[0].indexOf('will not be logged') > -1).length);
t.notOk(appEvents['1'].filter(e => e && e.data[0].indexOf('will not be logged') > -1).length);
t.end();
});
// wait for any IPC messages still to come, because it seems they are slooooow.
setTimeout(() => {
test('PM2 Support', (batch) => {
batch.test('should not get any events when turned off', (t) => {
t.notOk(appEvents['0'].filter(e => e && e.data[0].indexOf('will not be logged') > -1).length);
t.notOk(appEvents['1'].filter(e => e && e.data[0].indexOf('will not be logged') > -1).length);
t.end();
});

batch.test('should get events on app instance 0', (t) => {
t.equal(appEvents['0'].length, 2);
t.equal(appEvents['0'][0].data[0], 'this should now get logged');
t.equal(appEvents['0'][1].data[0], 'this should now get logged');
t.end();
});
batch.test('should get events on app instance 0', (t) => {
t.equal(appEvents['0'].length, 2);
t.equal(appEvents['0'][0].data[0], 'this should now get logged');
t.equal(appEvents['0'][1].data[0], 'this should now get logged');
t.end();
});

batch.test('should not get events on app instance 1', (t) => {
t.equal(appEvents['1'].length, 0);
t.end();
batch.test('should not get events on app instance 1', (t) => {
t.equal(appEvents['1'].length, 0);
t.end();
});
batch.end();
cluster.removeListener('message', messageHandler);
});
batch.end();
});
}, 1000);
}
});
} else {
@@ -67,21 +76,30 @@ if (cluster.isMaster) {
const logger = log4js.getLogger('test');
logger.info('this is a test, but without enabling PM2 support it will not be logged');

// we have to wait a bit, so that the process.send messages get a chance to propagate
// IPC messages can take a while to get through to start with.
setTimeout(() => {
log4js.configure({
appenders: { out: { type: 'recording' } },
categories: { default: { appenders: ['out'], level: 'info' } },
pm2: true
});
const anotherLogger = log4js.getLogger('test');
anotherLogger.info('this should now get logged');
}, 500);
log4js.shutdown(() => {
log4js.configure({
appenders: { out: { type: 'recording' } },
categories: { default: { appenders: ['out'], level: 'info' } },
pm2: true
});
const anotherLogger = log4js.getLogger('test');
setTimeout(() => {
anotherLogger.info('this should now get logged');
}, 1000);

// we have to wait a bit, so that the process.send messages get a chance to propagate
setTimeout(() => {
const events = recorder.replay();
process.send({ type: 'testing', instance: process.env.NODE_APP_INSTANCE, events: events });
cluster.worker.disconnect();
}, 2500);
// if we're the pm2-master we should wait for the other process to send its log messages
setTimeout(() => {
log4js.shutdown(() => {
const events = recorder.replay();
debug(`Sending test events ${events} from ${process.env.NODE_APP_INSTANCE}`);
process.send(
{ type: 'testing', instance: process.env.NODE_APP_INSTANCE, events: events },
() => { setTimeout(() => { cluster.worker.disconnect(); }, 1000); }
);
});
}, 3000);
});
}, 2000);
}