Mirror of https://github.com/log4js-node/log4js-node.git (synced 2025-12-08 19:26:01 +00:00)

Merge branch 'master' into logrotate

Commit 0b07c1cb7f
.gitignore (vendored, 7 lines changed)

@@ -1,8 +1,7 @@
*.log
*.log??
*.log*
build
node_modules
.bob/
test/streams/test-rolling-file-stream*
test/streams/test-rolling-stream-with-existing-files*
test/streams/test-*
.idea
.DS_Store
@@ -1,15 +1,15 @@
{
{
"node": true,
"laxcomma": true,
"indent": 2,
"globalstrict": true,
"maxparams": 5,
"maxparams": 6,
"maxdepth": 3,
"maxstatements": 20,
"maxcomplexity": 5,
"maxcomplexity": 10,
"maxlen": 100,
"globals": {
"describe": true,
"it": true
}
}
}

@@ -1,2 +0,0 @@
*.log
*.log??
@@ -1,5 +1,8 @@
language: node_js
sudo: false
node_js:
- "6"
- "5"
- "4"
- "0.12"
- "0.10"
- "0.8"
LICENSE (new file, 13 lines)

@@ -0,0 +1,13 @@
Copyright 2015 Gareth Jones (with contributions from many other people)

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
README.md (19 lines changed)

@@ -1,12 +1,13 @@
# log4js-node [](http://travis-ci.org/nomiddlename/log4js-node)

[](https://nodei.co/npm/log4js/)

This is a conversion of the [log4js](https://github.com/stritti/log4js)
framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript.

Out of the box it supports the following features:

* coloured console logging
* coloured console logging to stdout or stderr
* replacement of node's console.log functions (optional)
* file appender, with log rolling based on file size
* SMTP appender

@@ -14,6 +15,7 @@ Out of the box it supports the following features:
* hook.io appender
* Loggly appender
* Logstash UDP appender
* logFaces appender
* multiprocess appender (useful when you've got worker processes)
* a logger for connect/express servers
* configurable log message layout/patterns

@@ -89,15 +91,9 @@ configuration file (`log4js.configure('path/to/file.json')`), or a configuration
configuration file location may also be specified via the environment variable
LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`).
An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`.
By default, the configuration file is checked for changes every 60 seconds, and if changed, reloaded. This allows changes to logging levels to occur without restarting the application.
You can configure log4js to check for configuration file changes at regular intervals, and if changed, reload. This allows changes to logging levels to occur without restarting the application.

To turn off configuration file change checking, configure with:

```javascript
var log4js = require('log4js');
log4js.configure('my_log4js_configuration.json', {});
```
To specify a different period:
To turn it on and specify a period:

```javascript
log4js.configure('file.json', { reloadSecs: 300 });

@@ -108,8 +104,9 @@ For FileAppender you can also pass the path to the log directory as an option wh
log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' });
```
If you have already defined an absolute path for one of the FileAppenders in the configuration file, you could add a "absolute": true to the particular FileAppender to override the cwd option passed. Here is an example configuration file:
```json

#### my_log4js_configuration.json ####
```json
{
"appenders": [
{
@@ -35,11 +35,13 @@ logger.setLevel('ERROR');
//console logging methods have been replaced with log4js ones.
//so this will get coloured output on console, and appear in cheese.log
console.error("AAArgh! Something went wrong", { some: "otherObject", useful_for: "debug purposes" });
console.log("This should appear as info output");

//these will not appear (logging level beneath error)
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.log('Something funny about cheese.');
logger.warn('Cheese is quite smelly.');
//these end up on the console and in cheese.log
logger.error('Cheese %s is too ripe!', "gouda");
examples/hipchat-appender.js (new file, 54 lines)

@@ -0,0 +1,54 @@
/**
* !!! The hipchat-appender requires `hipchat-notifier` from npm, e.g.
* - list as a dependency in your application's package.json ||
* - npm install hipchat-notifier
*/

var log4js = require('../lib/log4js');

log4js.configure({
"appenders": [
{
"type" : "hipchat",
"hipchat_token": process.env.HIPCHAT_TOKEN || '< User token with Notification Privileges >',
"hipchat_room": process.env.HIPCHAT_ROOM || '< Room ID or Name >'
}
]
});

var logger = log4js.getLogger("hipchat");
logger.warn("Test Warn message");
logger.info("Test Info message");
logger.debug("Test Debug Message");
logger.trace("Test Trace Message");
logger.fatal("Test Fatal Message");
logger.error("Test Error Message");

// alternative configuration demonstrating callback + custom layout
///////////////////////////////////////////////////////////////////

// use a custom layout function (in this case, the provided basicLayout)
// format: [TIMESTAMP][LEVEL][category] - [message]
var customLayout = require('../lib/layouts').basicLayout;

log4js.configure({
"appenders": [
{
"type" : "hipchat",
"hipchat_token": process.env.HIPCHAT_TOKEN || '< User token with Notification Privileges >',
"hipchat_room": process.env.HIPCHAT_ROOM || '< Room ID or Name >',
"hipchat_from": "Mr. Semantics",
"hipchat_notify": false,
"hipchat_response_callback": function(err, response, body){
if(err || response.statusCode > 300){
throw new Error('hipchat-notifier failed');
}
console.log('mr semantics callback success');
},
"layout": customLayout
}
]
});

logger.info("Test customLayout from Mr. Semantics");
examples/logFaces-appender.js (new file, 24 lines)

@@ -0,0 +1,24 @@
var log4js = require('../lib/log4js');

/*
logFaces server configured with UDP receiver, using JSON format,
listening on port 55201 will receive the logs from the appender below.
*/

log4js.configure({
"appenders": [
{
"type": "logFacesAppender", // (mandatory) appender type
"application": "MY-NODEJS", // (optional) name of the application (domain)
"remoteHost": "localhost", // (optional) logFaces server host or IP address
"port": 55201, // (optional) logFaces UDP receiver port (must use JSON format)
"layout": { // (optional) the layout to use for messages
"type": "pattern",
"pattern": "%m"
}
}
]
});

var logger = log4js.getLogger("myLogger");
logger.info("Testing message %s", "arg1");
examples/slack-appender.js (new file, 24 lines)

@@ -0,0 +1,24 @@
//Note that slack appender needs slack-node package to work.
var log4js = require('../lib/log4js');

log4js.configure({
"appenders": [
{
"type" : "slack",
"token": 'TOKEN',
"channel_id": "#CHANNEL",
"username": "USERNAME",
"format": "text",
"category" : "slack",
"icon_url" : "ICON_URL"
}
]
});

var logger = log4js.getLogger("slack");
logger.warn("Test Warn message");
logger.info("Test Info message");
logger.debug("Test Debug Message");
logger.trace("Test Trace Message");
logger.fatal("Test Fatal Message");
logger.error("Test Error Message");
@@ -10,9 +10,9 @@ function categoryFilter (excludes, appender) {
};
}

function configure(config) {
function configure(config, options) {
log4js.loadAppender(config.appender.type);
var appender = log4js.appenderMakers[config.appender.type](config.appender);
var appender = log4js.appenderMakers[config.appender.type](config.appender, options);
return categoryFilter(config.exclude, appender);
}
@@ -11,7 +11,8 @@ function serializeLoggingEvent(loggingEvent) {
// The following allows us to serialize errors correctly.
for (var i = 0; i < loggingEvent.data.length; i++) {
var item = loggingEvent.data[i];
if (item && item.stack && JSON.stringify(item) === '{}') { // Validate that we really are in this case
// Validate that we really are in this case
if (item && item.stack && JSON.stringify(item) === '{}') {
loggingEvent.data[i] = {stack : item.stack};
}
}

@@ -22,24 +23,33 @@ function serializeLoggingEvent(loggingEvent) {
* Takes a string, returns an object with
* the correct log properties.
*
* This method has been "borrowed" from the `multiprocess` appender
* by `nomiddlename` (https://github.com/nomiddlename/log4js-node/blob/master/lib/appenders/multiprocess.js)
* This method has been "borrowed" from the `multiprocess` appender
* by `nomiddlename`
* (https://github.com/nomiddlename/log4js-node/blob/master/lib/appenders/multiprocess.js)
*
* Apparently, node.js serializes everything to strings when using `process.send()`,
* so we need smart deserialization that will recreate log date and level for further processing by log4js internals.
* Apparently, node.js serializes everything to strings when using `process.send()`,
* so we need smart deserialization that will recreate log date and level for further
* processing by log4js internals.
*/
function deserializeLoggingEvent(loggingEventString) {

var loggingEvent;

try {

loggingEvent = JSON.parse(loggingEventString);
loggingEvent.startTime = new Date(loggingEvent.startTime);
loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);

// Unwrap serialized errors
for (var i = 0; i < loggingEvent.data.length; i++) {
var item = loggingEvent.data[i];
if (item && item.stack) {
loggingEvent.data[i] = item.stack;
}
}

} catch (e) {

// JSON.parse failed, just log the contents probably a naughty.
loggingEvent = {
startTime: new Date(),

@@ -49,16 +59,16 @@ function deserializeLoggingEvent(loggingEventString) {
};
}
return loggingEvent;
}
}

/**
* Creates an appender.
* Creates an appender.
*
* If the current process is a master (`cluster.isMaster`), then this will be a "master appender".
* Otherwise this will be a worker appender, that just sends loggingEvents to the master process.
*
* If you are using this method directly, make sure to provide it with `config.actualAppenders` array
* of actual appender instances.
* If you are using this method directly, make sure to provide it with `config.actualAppenders`
* array of actual appender instances.
*
* Or better use `configure(config, options)`
*/

@@ -67,34 +77,45 @@ function createAppender(config) {
if (cluster.isMaster) {

var masterAppender = function(loggingEvent) {

if (config.actualAppenders) {
var size = config.actualAppenders.length;
for(var i = 0; i < size; i++) {
if (!config.appenders[i].category || config.appenders[i].category === loggingEvent.categoryName) {
// Relying on the index is not a good practice but otherwise the change would have been bigger.
config.actualAppenders[i](loggingEvent);
}
if (
!config.appenders[i].category ||
config.appenders[i].category === loggingEvent.categoryName
) {
// Relying on the index is not a good practice but otherwise
// the change would have been bigger.
config.actualAppenders[i](loggingEvent);
}
}
}
}

};

// Listen on new workers
cluster.on('fork', function(worker) {

worker.on('message', function(message) {
if (message.type && message.type === '::log-message') {
// console.log("master : " + cluster.isMaster + " received message: " + JSON.stringify(message.event));

var loggingEvent = deserializeLoggingEvent(message.event);

// Adding PID metadata
loggingEvent.pid = worker.process.pid;
loggingEvent.cluster = {
master: process.pid,
worker: worker.process.pid,
workerId: worker.id
};

masterAppender(loggingEvent);
}
});

});

return masterAppender;

} else {

return function(loggingEvent) {

@@ -103,27 +124,30 @@ function createAppender(config) {
// console.log("worker " + cluster.worker.id + " is sending message");
process.send({ type: '::log-message', event: serializeLoggingEvent(loggingEvent)});
}
}
};
}
}

function configure(config, options) {

if (config.appenders && cluster.isMaster) {

var size = config.appenders.length;
config.actualAppenders = new Array(size);

for(var i = 0; i < size; i++) {

log4js.loadAppender(config.appenders[i].type);
config.actualAppenders[i] = log4js.appenderMakers[config.appenders[i].type](config.appenders[i], options);

config.actualAppenders[i] = log4js.appenderMakers[config.appenders[i].type](
config.appenders[i],
options
);

}
}

return createAppender(config);
}

exports.appender = createAppender;
exports.configure = configure;
exports.configure = configure;
@@ -2,10 +2,10 @@
var layouts = require('../layouts')
, consoleLog = console.log.bind(console);

function consoleAppender (layout) {
function consoleAppender (layout, timezoneOffset) {
layout = layout || layouts.colouredLayout;
return function(loggingEvent) {
consoleLog(layout(loggingEvent));
consoleLog(layout(loggingEvent, timezoneOffset));
};
}

@@ -14,7 +14,7 @@ function configure(config) {
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return consoleAppender(layout);
return consoleAppender(layout, config.timezoneOffset);
}

exports.appender = consoleAppender;
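For context, the `timezoneOffset` option threaded through `configure()` above comes from the appender's configuration block. A minimal sketch of how it might be supplied (the `"console"` type name and the offset value here are illustrative assumptions; only the `timezoneOffset` key is taken from the diff):

```javascript
var log4js = require('log4js');

// Hypothetical configuration: timezoneOffset is in minutes and is passed
// straight through to the layout functions by the appender above.
log4js.configure({
  appenders: [
    { type: "console", timezoneOffset: 600 }
  ]
});

log4js.getLogger("tz-demo").info("timestamps rendered with the configured offset");
```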
@@ -1,7 +1,6 @@
"use strict";
var streams = require('../streams')
, layouts = require('../layouts')
, async = require('async')
, path = require('path')
, os = require('os')
, eol = os.EOL || '\n'

@@ -20,8 +19,9 @@ process.on('exit', function() {
* @pattern the format that will be added to the end of filename when rolling,
* also used to check when to roll files - defaults to '.yyyy-MM-dd'
* @layout layout function for log messages - defaults to basicLayout
* @timezoneOffset optional timezone offset in minutes - defaults to system local
*/
function appender(filename, pattern, alwaysIncludePattern, layout) {
function appender(filename, pattern, alwaysIncludePattern, layout, timezoneOffset) {
layout = layout || layouts.basicLayout;

var logFile = new streams.DateRollingFileStream(

@@ -32,7 +32,7 @@ function appender(filename, pattern, alwaysIncludePattern, layout) {
openFiles.push(logFile);

return function(logEvent) {
logFile.write(layout(logEvent) + eol, "utf8");
logFile.write(layout(logEvent, timezoneOffset) + eol, "utf8");
};

}

@@ -52,19 +52,37 @@ function configure(config, options) {
config.filename = path.join(options.cwd, config.filename);
}

return appender(config.filename, config.pattern, config.alwaysIncludePattern, layout);
return appender(
config.filename,
config.pattern,
config.alwaysIncludePattern,
layout,
config.timezoneOffset
);
}

function shutdown(cb) {
async.each(openFiles, function(file, done) {
var completed = 0;
var error;
var complete = function(err) {
error = error || err;
completed++;
if (completed >= openFiles.length) {
cb(error);
}
};
if (!openFiles.length) {
return cb();
}
openFiles.forEach(function(file) {
if (!file.write(eol, "utf-8")) {
file.once('drain', function() {
file.end(done);
file.end(complete);
});
} else {
file.end(done);
file.end(complete);
}
}, cb);
});
}

exports.appender = appender;
@@ -1,12 +1,12 @@
"use strict";
var layouts = require('../layouts')
, async = require('async')
, path = require('path')
, fs = require('fs')
, streams = require('../streams')
, os = require('os')
, eol = os.EOL || '\n'
, openFiles = [];
, openFiles = []
, levels = require('../levels');

//close open files on process exit.
process.on('exit', function() {

@@ -28,14 +28,16 @@ process.on('SIGHUP', function() {
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
* @param file file log messages will be written to
* @param layout a function that takes a logevent and returns a string
* @param layout a function that takes a logevent and returns a string
* (defaults to basicLayout).
* @param logSize - the maximum size (in bytes) for a log file,
* @param logSize - the maximum size (in bytes) for a log file,
* if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize
* @param numBackups - the number of log files to keep after logSize
* has been reached (default 5)
* @param compress - flag that controls log file compression
* @param timezoneOffset - optional timezone offset in minutes (default system local)
*/
function fileAppender (file, layout, logSize, numBackups) {
function fileAppender (file, layout, logSize, numBackups, compress, timezoneOffset) {
var bytesWritten = 0;
file = path.normalize(file);
layout = layout || layouts.basicLayout;

@@ -55,8 +57,9 @@ function fileAppender (file, layout, logSize, numBackups) {
openFiles.push(writer);

return function(loggingEvent) {
writer.stream.write(layout(loggingEvent) + eol, "utf8");
writer.stream.write(layout(loggingEvent, timezoneOffset) + eol, "utf8");
};

}

function openTheStream(file, fileSize, numFiles) {

@@ -65,7 +68,8 @@ function openTheStream(file, fileSize, numFiles) {
stream = new streams.RollingFileStream(
file,
fileSize,
numFiles
numFiles,
{ "compress": compress }
);
} else {
stream = fs.createWriteStream(

@@ -92,21 +96,40 @@ function configure(config, options) {
config.filename = path.join(options.cwd, config.filename);
}

return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
return fileAppender(
config.filename,
layout,
config.maxLogSize,
config.backups,
config.compress,
config.timezoneOffset
);
}

function shutdown(cb) {
async.each(openFiles, function(file, done) {
var completed = 0;
var error;
var complete = function(err) {
error = error || err;
completed++;
if (completed >= openFiles.length) {
cb(error);
}
};
if (!openFiles.length) {
return cb();
}
openFiles.forEach(function(file) {
var stream = file.stream;
if (!stream.write(eol, "utf-8")) {
stream.once('drain', function() {
stream.end(done);
stream.end(complete);
});
} else {
stream.end(done);
stream.end(complete);
}
}, cb);
}
});
}

exports.appender = fileAppender;
exports.configure = configure;
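A minimal sketch of how the new file-appender options might be wired up from configuration. The option names (`maxLogSize`, `backups`, `compress`, `timezoneOffset`) come from `configure()` in the diff above; the `"file"` type name, file name, and values are illustrative assumptions:

```javascript
var log4js = require('log4js');

log4js.configure({
  appenders: [
    {
      type: "file",
      filename: "application.log",
      maxLogSize: 10485760, // roll the file once it reaches ~10 MB
      backups: 5,           // keep five rolled backups
      compress: true,       // new option: compress rolled files
      timezoneOffset: 0     // new option: offset (minutes) passed to the layout
    }
  ]
});
```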
@@ -16,15 +16,15 @@ function RollingFileSync (filename, size, backups, options) {
throw new Error("You must specify a filename and file size");
}
}

throwErrorIfArgumentsAreNotValid();

this.filename = filename;
this.size = size;
this.backups = backups || 1;
this.options = options || { encoding: 'utf8', mode: parseInt('0644', 8), flags: 'a' };
this.currentSize = 0;

function currentFileSize(file) {
var fileSize = 0;
try {

@@ -47,15 +47,15 @@ RollingFileSync.prototype.shouldRoll = function() {
RollingFileSync.prototype.roll = function(filename) {
var that = this,
nameMatcher = new RegExp('^' + path.basename(filename));

function justTheseFiles (item) {
return nameMatcher.test(item);
}

function index(filename_) {
return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
}

function byIndex(a, b) {
if (index(a) > index(b)) {
return 1;

@@ -77,7 +77,7 @@ RollingFileSync.prototype.roll = function(filename) {
} catch(e) {
//ignore err: if we could not delete, it's most likely that it doesn't exist
}

debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
fs.renameSync(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1));
}

@@ -86,7 +86,7 @@ RollingFileSync.prototype.roll = function(filename) {
function renameTheFiles() {
//roll the backups (rename file.n to file.n+1, where n <= numBackups)
debug("Renaming the old files");

var files = fs.readdirSync(path.dirname(filename));
files.filter(justTheseFiles).sort(byIndex).reverse().forEach(increaseFileIndex);
}

@@ -97,8 +97,8 @@ RollingFileSync.prototype.roll = function(filename) {

RollingFileSync.prototype.write = function(chunk, encoding) {
var that = this;

function writeTheChunk() {
debug("writing the chunk to the file");
that.currentSize += chunk.length;

@@ -106,13 +106,13 @@ RollingFileSync.prototype.write = function(chunk, encoding) {
}

debug("in write");

if (this.shouldRoll()) {
this.currentSize = 0;
this.roll(this.filename);
}

writeTheChunk();
};

@@ -121,14 +121,16 @@ RollingFileSync.prototype.write = function(chunk, encoding) {
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
* @param file file log messages will be written to
* @param layout a function that takes a logevent and returns a string
* @param layout a function that takes a logevent and returns a string
* (defaults to basicLayout).
* @param logSize - the maximum size (in bytes) for a log file,
* @param logSize - the maximum size (in bytes) for a log file,
* if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize
* @param numBackups - the number of log files to keep after logSize
* has been reached (default 5)
* @param timezoneOffset - optional timezone offset in minutes
* (default system local)
*/
function fileAppender (file, layout, logSize, numBackups) {
function fileAppender (file, layout, logSize, numBackups, timezoneOffset) {
debug("fileSync appender created");
var bytesWritten = 0;
file = path.normalize(file);

@@ -139,7 +141,7 @@ function fileAppender (file, layout, logSize, numBackups) {

function openTheStream(file, fileSize, numFiles) {
var stream;

if (fileSize) {
stream = new RollingFileSync(
file,

@@ -151,7 +153,7 @@ function fileAppender (file, layout, logSize, numBackups) {
// create file if it doesn't exist
if (!fs.existsSync(f))
fs.appendFileSync(f, '');

return {
write: function(data) {
fs.appendFileSync(f, data);

@@ -164,9 +166,9 @@ function fileAppender (file, layout, logSize, numBackups) {
}

var logFile = openTheStream(file, logSize, numBackups);

return function(loggingEvent) {
logFile.write(layout(loggingEvent) + eol);
logFile.write(layout(loggingEvent, timezoneOffset) + eol);
};
}

@@ -180,7 +182,13 @@ function configure(config, options) {
config.filename = path.join(options.cwd, config.filename);
}

return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
return fileAppender(
config.filename,
layout,
config.maxLogSize,
config.backups,
config.timezoneOffset
);
}

exports.appender = fileAppender;
@@ -25,6 +25,8 @@ levelMapping[levels.WARN] = LOG_WARNING;
levelMapping[levels.ERROR] = LOG_ERR;
levelMapping[levels.FATAL] = LOG_CRIT;

var client;

/**
* GELF appender that supports sending UDP packets to a GELF compatible server such as Graylog
*

@@ -34,6 +36,7 @@ levelMapping[levels.FATAL] = LOG_CRIT;
* @param hostname - hostname of the current host (default:os hostname)
* @param facility - facility to log to (default:nodejs-server)
*/
/* jshint maxstatements:21 */
function gelfAppender (layout, host, port, hostname, facility) {
var config, customFields;
if (typeof(host) === 'object') {

@@ -44,7 +47,7 @@ function gelfAppender (layout, host, port, hostname, facility) {
facility = config.facility;
customFields = config.customFields;
}

host = host || 'localhost';
port = port || 12201;
hostname = hostname || require('os').hostname();

@@ -53,18 +56,18 @@ function gelfAppender (layout, host, port, hostname, facility) {
var defaultCustomFields = customFields || {};

if(facility) {
defaultCustomFields['_facility'] = facility;
defaultCustomFields._facility = facility;
}

var client = dgram.createSocket("udp4");

client = dgram.createSocket("udp4");

process.on('exit', function() {
if (client) client.close();
});

/**
* Add custom fields (start with underscore )
* - if the first object passed to the logger contains 'GELF' field,
* Add custom fields (start with underscore )
* - if the first object passed to the logger contains 'GELF' field,
* copy the underscore fields to the message
* @param loggingEvent
* @param msg

@@ -74,7 +77,7 @@ function gelfAppender (layout, host, port, hostname, facility) {
/* append defaultCustomFields firsts */
Object.keys(defaultCustomFields).forEach(function(key) {
// skip _id field for graylog2, skip keys not starts with UNDERSCORE
if (key.match(/^_/) && key !== "_id") {
if (key.match(/^_/) && key !== "_id") {
msg[key] = defaultCustomFields[key];
}
});

@@ -83,35 +86,37 @@ function gelfAppender (layout, host, port, hostname, facility) {
var data = loggingEvent.data;
if (!Array.isArray(data) || data.length === 0) return;
var firstData = data[0];

if (!firstData.GELF) return; // identify with GELF field defined
// Remove the GELF key, some gelf supported logging systems drop the message with it
delete firstData.GELF;
Object.keys(firstData).forEach(function(key) {
// skip _id field for graylog2, skip keys not starts with UNDERSCORE
if (key.match(/^_/) || key !== "_id") {
if (key.match(/^_/) || key !== "_id") {
msg[key] = firstData[key];
}
});

/* the custom field object should be removed, so it will not be looged by the later appenders */
loggingEvent.data.shift();
loggingEvent.data.shift();
}

function preparePacket(loggingEvent) {
var msg = {};
addCustomFields(loggingEvent, msg);
msg.short_message = layout(loggingEvent);

msg.version="1.1";
msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // log should use millisecond
msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // log should use millisecond
msg.host = hostname;
msg.level = levelMapping[loggingEvent.level || levels.DEBUG];
return msg;
}

function sendPacket(packet) {
try {
client.send(packet, 0, packet.length, port, host);
} catch(e) {}
client.send(packet, 0, packet.length, port, host, function(err) {
if (err) { console.error(err); }
});
}

return function(loggingEvent) {

@@ -138,5 +143,13 @@ function configure(config) {
return gelfAppender(layout, config);
}

function shutdown(cb) {
if (client) {
client.close(cb);
client = null;
}
}

exports.appender = gelfAppender;
exports.configure = configure;
exports.shutdown = shutdown;
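For reference, a sketch of how the GELF appender above might be configured and shut down. The option names (`host`, `port`, `hostname`, `facility`, `customFields`) mirror the parameters documented in the diff; the `"gelf"` type string, the host name, and the values are assumptions for illustration:

```javascript
var log4js = require('log4js');

// Hypothetical configuration; custom field names must start with an underscore,
// as enforced by addCustomFields() above.
log4js.configure({
  appenders: [
    {
      type: "gelf",
      host: "graylog.example.com", // placeholder GELF receiver
      port: 12201,
      facility: "my-nodejs-app",
      customFields: { "_environment": "staging" }
    }
  ]
});

// With the new shutdown() export, closing log4js also closes the UDP socket.
log4js.shutdown(function () { /* socket closed, safe to exit */ });
```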
lib/appenders/hipchat.js (new file, 90 lines)

@@ -0,0 +1,90 @@
"use strict";

var hipchat = require('hipchat-notifier');
var layouts = require('../layouts');

exports.name = 'hipchat';
exports.appender = hipchatAppender;
exports.configure = hipchatConfigure;

/**
@invoke as

log4js.configure({
"appenders": [
{
"type" : "hipchat",
"hipchat_token": "< User token with Notification Privileges >",
"hipchat_room": "< Room ID or Name >",
// optionl
"hipchat_from": "[ additional from label ]",
"hipchat_notify": "[ notify boolean to bug people ]",
"hipchat_host" : "api.hipchat.com"
}
]
});

var logger = log4js.getLogger("hipchat");
logger.warn("Test Warn message");

@invoke
*/

function hipchatNotifierResponseCallback(err, response, body){
if(err) {
throw err;
}
}

function hipchatAppender(config) {

var notifier = hipchat.make(config.hipchat_room, config.hipchat_token);

// @lint W074 This function's cyclomatic complexity is too high. (10)
return function(loggingEvent){

var notifierFn;

notifier.setRoom(config.hipchat_room);
notifier.setFrom(config.hipchat_from || '');
notifier.setNotify(config.hipchat_notify || false);

if(config.hipchat_host) {
notifier.setHost(config.hipchat_host);
}

switch (loggingEvent.level.toString()) {
case "TRACE":
case "DEBUG":
notifierFn = "info";
break;
case "WARN":
notifierFn = "warning";
break;
case "ERROR":
case "FATAL":
notifierFn = "failure";
break;
default:
notifierFn = "success";
}

// @TODO, re-work in timezoneOffset ?
var layoutMessage = config.layout(loggingEvent);

// dispatch hipchat api request, do not return anything
// [overide hipchatNotifierResponseCallback]
notifier[notifierFn](layoutMessage, config.hipchat_response_callback ||
hipchatNotifierResponseCallback);
};
}

function hipchatConfigure(config) {
var layout;

if (!config.layout) {
config.layout = layouts.messagePassThroughLayout;
}

return hipchatAppender(config, layout);
}
lib/appenders/logFacesAppender.js (new file, 71 lines)

@@ -0,0 +1,71 @@
/**
* logFaces appender sends JSON formatted log events to logFaces server UDP receivers.
* Events contain the following properties:
* - application name (taken from configuration)
* - host name (taken from underlying os)
* - time stamp
* - level
* - logger name (e.g. category)
* - thread name (current process id)
* - message text
*/

"use strict";
var dgram = require('dgram'),
layouts = require('../layouts'),
os = require('os'),
util = require('util');

try{
var process = require('process');
}
catch(error){
//this module is optional as it may not be available
//in older versions of node.js, so ignore if it failes to load
}

function logFacesAppender (config, layout) {
var lfsSock = dgram.createSocket('udp4');
var localhost = "";

if(os && os.hostname())
localhost = os.hostname().toString();

var pid = "";
if(process && process.pid)
pid = process.pid;

return function log(loggingEvent) {
var lfsEvent = {
a: config.application || "", // application name
h: localhost, // this host name
t: loggingEvent.startTime.getTime(), // time stamp
p: loggingEvent.level.levelStr, // level (priority)
g: loggingEvent.categoryName, // logger name
r: pid, // thread (process id)
m: layout(loggingEvent) // message text
};

var buffer = new Buffer(JSON.stringify(lfsEvent));
var lfsHost = config.remoteHost || "127.0.0.1";
var lfsPort = config.port || 55201;
lfsSock.send(buffer, 0, buffer.length, lfsPort, lfsHost, function(err, bytes) {
if(err) {
console.error("log4js.logFacesAppender send to %s:%d failed, error: %s",
config.host, config.port, util.inspect(err));
}
});
};
}

function configure(config) {
var layout;
if (config.layout)
layout = layouts.layout(config.layout.type, config.layout);
else
layout = layouts.layout("pattern", {"type": "pattern", "pattern": "%m"});
return logFacesAppender(config, layout);
}

exports.appender = logFacesAppender;
exports.configure = configure;
@@ -5,14 +5,52 @@ var layouts = require('../layouts')
, passThrough = layouts.messagePassThroughLayout;

function isAnyObject(value) {
return value !== null && (typeof value === 'object' || typeof value === 'function');
}

function numKeys(o) {
var res = 0;
for (var k in o) {
if (o.hasOwnProperty(k)) res++;
}
return res;
}

/**
* Loggly Appender. Sends logging events to Loggly using node-loggly
* @param msg - array of args for logging.
* @returns { deTaggedMsg: [], additionalTags: [] }
*/
function processTags(msgListArgs) {
var msgList = (msgListArgs.length === 1 ? [msgListArgs[0]] : Array.apply(null, msgListArgs));

return msgList.reduce(function (accum, element, currentIndex, array) {
if (isAnyObject(element) && Array.isArray(element.tags) && numKeys(element) == 1) {
accum.additionalTags = accum.additionalTags.concat(element.tags);
} else {
accum.deTaggedData.push(element);
}
return accum;
}, { deTaggedData: [], additionalTags: [] });
}

/**
* Loggly Appender. Sends logging events to Loggly using node-loggly, optionally adding tags.
*
* This appender will scan the msg from the logging event, and pull out any argument of the
* shape `{ tags: [] }` so that it's possibleto add tags in a normal logging call.
*
* For example:
*
* logger.info({ tags: ['my-tag-1', 'my-tag-2'] }, 'Some message', someObj, ...)
*
* And then this appender will remove the tags param and append it to the config.tags.
*
* @param config object with loggly configuration data
* {
* token: 'your-really-long-input-token',
* subdomain: 'your-subdomain',
* tags: ['loggly-tag1', 'loggly-tag2', .., 'loggly-tagn']
* tags: ['loggly-tag1', 'loggly-tag2', .., 'loggly-tagn']
* }
* @param layout a function that takes a logevent and returns a string (defaults to objectLayout).
*/

@@ -21,14 +59,22 @@ function logglyAppender(config, layout) {
if(!layout) layout = passThrough;

return function(loggingEvent) {
var msg = layout(loggingEvent);
var result = processTags(loggingEvent.data);
var deTaggedData = result.deTaggedData;
var additionalTags = result.additionalTags;

// Replace the data property with the deTaggedData
loggingEvent.data = deTaggedData;

var msg = layout(loggingEvent);

client.log({
msg: msg,
level: loggingEvent.level.levelStr,
category: loggingEvent.categoryName,
hostname: os.hostname().toString(),
});
}
}, additionalTags);
};
}

function configure(config) {
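A short usage sketch of the tag handling introduced above, taken from the doc comment in the diff (the category name and extra data object are illustrative):

```javascript
var log4js = require('log4js');
var logger = log4js.getLogger("loggly-demo");

// An argument of the exact shape { tags: [...] } is pulled out of the event
// data by processTags() and forwarded to Loggly as additional tags; the
// remaining arguments are logged as usual.
logger.info({ tags: ['my-tag-1', 'my-tag-2'] }, "Some message", { requestId: 42 });
```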
@@ -6,22 +6,40 @@ var layouts = require('../layouts')
function logstashUDP (config, layout) {
var udp = dgram.createSocket('udp4');
var type = config.logType ? config.logType : config.category;
layout = layout || layouts.colouredLayout;
layout = layout || layouts.dummyLayout;
if(!config.fields) {
config.fields = {};
}
return function(loggingEvent) {
var logMessage = layout(loggingEvent);
var fields = {};
for(var i in config.fields) {
fields[i] = config.fields[i];
return function log(loggingEvent) {

/*
https://gist.github.com/jordansissel/2996677
{
"message" => "hello world",
"@version" => "1",
"@timestamp" => "2014-04-22T23:03:14.111Z",
"type" => "stdin",
"host" => "hello.local"
}
@timestamp is the ISO8601 high-precision timestamp for the event.
@version is the version number of this json schema
Every other field is valid and fine.
*/

if (loggingEvent.data.length > 1) {
var secondEvData = loggingEvent.data[1];
for (var k in secondEvData) {
config.fields[k] = secondEvData[k];
}
}
fields['level'] = loggingEvent.level.levelStr;
config.fields.level = loggingEvent.level.levelStr;

var logObject = {
'@timestamp': (new Date(loggingEvent.startTime)).toISOString(),
type: type,
message: logMessage,
fields: fields
"@version" : "1",
"@timestamp" : (new Date(loggingEvent.startTime)).toISOString(),
"type" : config.logType ? config.logType : config.category,
"message" : layout(loggingEvent),
"fields" : config.fields
};
sendLog(udp, config.host, config.port, logObject);
};
lib/appenders/mailgun.js (new file, 43 lines)

@@ -0,0 +1,43 @@
"use strict";
var layouts = require('../layouts');
var layout;
var config;
var mailgun;

function mailgunAppender(_config, _layout) {

config = _config;
layout = _layout || layouts.basicLayout;

return function (loggingEvent) {

var data = {
from: _config.from,
to: _config.to,
subject: _config.subject,
text: layout(loggingEvent, config.timezoneOffset)
};

mailgun.messages().send(data, function (error, body) {
if (error !== null) console.error("log4js.mailgunAppender - Error happened", error);
});
};
}

function configure(_config) {
config = _config;

if (_config.layout) {
layout = layouts.layout(_config.layout.type, _config.layout);
}

mailgun = require('mailgun-js')({
apiKey: _config.apikey,
domain: _config.domain
});

return mailgunAppender(_config, layout);
}

exports.appender = mailgunAppender;
exports.configure = configure;
@@ -9,7 +9,7 @@ var log4js = require('../log4js')
* set up that appender).
*/
function logServer(config) {

/**
* Takes a utf-8 string, returns an object with
* the correct log properties.

@@ -32,21 +32,21 @@ function logServer(config) {

loggingEvent.remoteAddress = clientSocket.remoteAddress;
loggingEvent.remotePort = clientSocket.remotePort;

return loggingEvent;
}

var actualAppender = config.actualAppender,
server = net.createServer(function serverCreated(clientSocket) {
clientSocket.setEncoding('utf8');
var logMessage = '';

function logTheMessage(msg) {
if (logMessage.length > 0) {
actualAppender(deserializeLoggingEvent(clientSocket, msg));
}
}

function chunkReceived(chunk) {
var event;
logMessage += chunk || '';

@@ -58,13 +58,13 @@ function logServer(config) {
chunkReceived();
}
}

clientSocket.on('data', chunkReceived);
clientSocket.on('end', chunkReceived);
});

server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost');

return actualAppender;
}

@@ -72,9 +72,9 @@ function workerAppender(config) {
var canWrite = false,
buffer = [],
socket;

createSocket();

function createSocket() {
socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
socket.on('connect', function() {

@@ -85,24 +85,25 @@ function workerAppender(config) {
//don't bother listening for 'error', 'close' gets called after that anyway
socket.on('close', createSocket);
}

function emptyBuffer() {
var evt;
while ((evt = buffer.shift())) {
write(evt);
}
}

function write(loggingEvent) {
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
// The following allows us to serialize errors correctly.
if (loggingEvent && loggingEvent.stack && JSON.stringify(loggingEvent) === '{}') { // Validate that we really are in this case
// Validate that we really are in this case
if (loggingEvent && loggingEvent.stack && JSON.stringify(loggingEvent) === '{}') {
loggingEvent = {stack : loggingEvent.stack};
}
socket.write(JSON.stringify(loggingEvent), 'utf8');
socket.write(END_MSG, 'utf8');
}

return function log(loggingEvent) {
if (canWrite) {
write(loggingEvent);
lib/appenders/slack.js (new file, 44 lines)

@@ -0,0 +1,44 @@
"use strict";
var Slack = require('slack-node');
var layouts = require('../layouts');
var layout;

var slack, config;

function slackAppender(_config, _layout) {

layout = _layout || layouts.basicLayout;

return function (loggingEvent) {

var data = {
channel_id: _config.channel_id,
text: layout(loggingEvent, _config.timezoneOffset),
icon_url: _config.icon_url,
username: _config.username
};

slack.api('chat.postMessage', {
channel: data.channel_id,
text: data.text,
icon_url: data.icon_url,username: data.username}, function (err, response) {
if (err) { throw err; }
});

};
}

function configure(_config) {

if (_config.layout) {
layout = layouts.layout(_config.layout.type, _config.layout);
}

slack = new Slack(_config.token);

return slackAppender(_config, layout);
}

exports.name = 'slack';
exports.appender = slackAppender;
exports.configure = configure;
@@ -1,82 +1,152 @@
"use strict";
var layouts = require("../layouts")
, mailer = require("nodemailer")
, os = require('os');

/**
* SMTP Appender. Sends logging events using SMTP protocol.
* It can either send an email on each event or group several
* logging events gathered during specified interval.
*
* @param config appender configuration data
* config.sendInterval time between log emails (in seconds), if 0
* then every event sends an email
* @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
*/
function smtpAppender(config, layout) {
layout = layout || layouts.basicLayout;
var subjectLayout = layouts.messagePassThroughLayout;
var sendInterval = config.sendInterval*1000 || 0;

var logEventBuffer = [];
var sendTimer;

function sendBuffer() {
var layouts = require("../layouts");
var mailer = require("nodemailer");
var os = require('os');

var logEventBuffer = [];
var subjectLayout;
var layout;

var unsentCount = 0;
var shutdownTimeout;

var sendInterval;
var sendTimer;

var config;

function sendBuffer() {
if (logEventBuffer.length > 0) {

var transport = mailer.createTransport(config.transport, config[config.transport]);
var firstEvent = logEventBuffer[0];
var body = "";
while (logEventBuffer.length > 0) {
body += layout(logEventBuffer.shift()) + "\n";
}

var msg = {
to: config.recipients,
subject: config.subject || subjectLayout(firstEvent),
text: body,
headers: { "Hostname": os.hostname() }
};
if (config.sender) {
msg.from = config.sender;
}
transport.sendMail(msg, function(error, success) {
if (error) {
console.error("log4js.smtpAppender - Error happened", error);
var transportOpts = getTransportOptions(config);
var transport = mailer.createTransport(transportOpts);
var firstEvent = logEventBuffer[0];
var body = "";
var count = logEventBuffer.length;
while (logEventBuffer.length > 0) {
body += layout(logEventBuffer.shift(), config.timezoneOffset) + "\n";
}
transport.close();
});

var msg = {
to: config.recipients,
subject: config.subject || subjectLayout(firstEvent),
headers: {"Hostname": os.hostname()}
};

if (true === config.attachment.enable) {
msg[config.html ? "html" : "text"] = config.attachment.message;
msg.attachments = [
{
filename: config.attachment.filename,
contentType: 'text/x-log',
content: body
}
];
} else {
msg[config.html ? "html" : "text"] = body;
}

if (config.sender) {
msg.from = config.sender;
}
transport.sendMail(msg, function (error) {
if (error) {
console.error("log4js.smtpAppender - Error happened", error);
}
transport.close();
unsentCount -= count;
});
}
}

function scheduleSend() {
if (!sendTimer) {
sendTimer = setTimeout(function() {
sendTimer = null;
sendBuffer();
}, sendInterval);
}
}

return function(loggingEvent) {
logEventBuffer.push(loggingEvent);
if (sendInterval > 0) {
scheduleSend();
} else {
sendBuffer();
}
};
}

function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return smtpAppender(config, layout);
function getTransportOptions() {
var transportOpts = null;
if (config.SMTP) {
transportOpts = config.SMTP;
} else if (config.transport) {
var plugin = config.transport.plugin || 'smtp';
var transportModule = 'nodemailer-' + plugin + '-transport';
var transporter = require(transportModule);
transportOpts = transporter(config.transport.options);
}

return transportOpts;
}

function scheduleSend() {
if (!sendTimer) {
sendTimer = setTimeout(function () {
sendTimer = null;
sendBuffer();
}, sendInterval);
}
}

/**
* SMTP Appender. Sends logging events using SMTP protocol.
* It can either send an email on each event or group several
* logging events gathered during specified interval.
*
* @param _config appender configuration data
* config.sendInterval time between log emails (in seconds), if 0
* then every event sends an email
* config.shutdownTimeout time to give up remaining emails (in seconds; defaults to 5).
* @param _layout a function that takes a logevent and returns a string (defaults to basicLayout).
*/
function smtpAppender(_config, _layout) {
config = _config;

if (!config.attachment) {
config.attachment = {};
}

config.attachment.enable = !!config.attachment.enable;
config.attachment.message = config.attachment.message || "See logs as attachment";
config.attachment.filename = config.attachment.filename || "default.log";
layout = _layout || layouts.basicLayout;
subjectLayout = layouts.messagePassThroughLayout;
sendInterval = config.sendInterval * 1000 || 0;

shutdownTimeout = ('shutdownTimeout' in config ? config.shutdownTimeout : 5) * 1000;

return function (loggingEvent) {
unsentCount++;
logEventBuffer.push(loggingEvent);
if (sendInterval > 0) {
scheduleSend();
} else {
sendBuffer();
}
};
}

function configure(_config) {
config = _config;
if (_config.layout) {
layout = layouts.layout(_config.layout.type, _config.layout);
}
return smtpAppender(_config, layout);
}

function shutdown(cb) {
if (shutdownTimeout > 0) {
setTimeout(function () {
if (sendTimer)
clearTimeout(sendTimer);
sendBuffer();
}, shutdownTimeout);
}
(function checkDone() {
if (unsentCount > 0) {
setTimeout(checkDone, 100);
} else {
cb();
}
})();
}

exports.name = "smtp";
exports.appender = smtpAppender;
exports.configure = configure;

exports.shutdown = shutdown;
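A sketch of an SMTP appender configuration exercising the options handled above. The keys (`recipients`, `sender`, `subject`, `sendInterval`, `shutdownTimeout`, `transport.plugin`, `transport.options`, `attachment.*`) come from the code in this diff; the `"smtp"` type name, addresses, and values are placeholders:

```javascript
var log4js = require('log4js');

log4js.configure({
  appenders: [
    {
      type: "smtp",
      recipients: "ops@example.com",   // placeholder address
      sender: "app@example.com",       // placeholder address
      subject: "Application error log",
      sendInterval: 60,                // batch events and mail every 60 seconds
      shutdownTimeout: 5,              // seconds to wait for unsent mail on shutdown
      transport: {
        plugin: "smtp",                // resolved as nodemailer-smtp-transport
        options: { host: "smtp.example.com", port: 25 }
      },
      attachment: {
        enable: true,                  // send the log body as a text/x-log attachment
        message: "See logs as attachment",
        filename: "errors.log"
      }
    }
  ]
});
```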
lib/appenders/stderr.js (new file, 21 lines)

@@ -0,0 +1,21 @@
"use strict";

var layouts = require('../layouts');

function stderrAppender(layout, timezoneOffset) {
layout = layout || layouts.colouredLayout;
return function(loggingEvent) {
process.stderr.write(layout(loggingEvent, timezoneOffset) + '\n');
};
}

function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return stderrAppender(layout, config.timezoneOffset);
}

exports.appender = stderrAppender;
exports.configure = configure;
lib/connect-logger.js (mode changed: normal file → executable file, 158 lines changed)

@@ -1,8 +1,8 @@
"use strict";
var levels = require("./levels");
var DEFAULT_FORMAT = ':remote-addr - -' +
' ":method :url HTTP/:http-version"' +
' :status :content-length ":referrer"' +
var DEFAULT_FORMAT = ':remote-addr - -' +
' ":method :url HTTP/:http-version"' +
' :status :content-length ":referrer"' +
' ":user-agent"';
/**
* Log requests with the given `options` or a `format` string.

@@ -52,16 +52,15 @@ function getLogger(logger4js, options) {
// nologs
if (nolog && nolog.test(req.originalUrl)) return next();
if (thislogger.isLevelEnabled(level) || options.level === 'auto') {

var start = new Date()
, statusCode
, writeHead = res.writeHead
, end = res.end
, url = req.originalUrl;

// flag as logging
req._logging = true;

// proxy for statusCode.
res.writeHead = function(code, headers){
res.writeHead = writeHead;

@@ -78,11 +77,9 @@ function getLogger(logger4js, options) {
level = levels.toLevel(options.level, levels.INFO);
}
};

// proxy end to output a line to the provided logger.
res.end = function(chunk, encoding) {
res.end = end;
res.end(chunk, encoding);

//hook on end request to emit the log entry of the HTTP request.
res.on('finish', function() {
res.responseTime = new Date() - start;
//status code response level handling
if(res.statusCode && options.level === 'auto'){

@@ -91,21 +88,108 @@ function getLogger(logger4js, options) {
if(res.statusCode >= 400) level = levels.ERROR;
}
if (thislogger.isLevelEnabled(level)) {
var combined_tokens = assemble_tokens(req, res, options.tokens || []);
if (typeof fmt === 'function') {
var line = fmt(req, res, function(str){ return format(str, req, res); });
var line = fmt(req, res, function(str){ return format(str, combined_tokens); });
if (line) thislogger.log(level, line);
} else {
thislogger.log(level, format(fmt, req, res));
thislogger.log(level, format(fmt, combined_tokens));
}
}
};
});
}

//ensure next gets always called
next();
};
}

/**
* Adds custom {token, replacement} objects to defaults,
* overwriting the defaults if any tokens clash
*
* @param {IncomingMessage} req
* @param {ServerResponse} res
* @param {Array} custom_tokens
* [{ token: string-or-regexp, replacement: string-or-replace-function }]
* @return {Array}
*/
function assemble_tokens(req, res, custom_tokens) {
var array_unique_tokens = function(array) {
var a = array.concat();
for(var i=0; i<a.length; ++i) {
for(var j=i+1; j<a.length; ++j) {
if(a[i].token == a[j].token) { // not === because token can be regexp object
a.splice(j--, 1);
}
}
}
return a;
};

var default_tokens = [];
default_tokens.push({ token: ':url', replacement: getUrl(req) });
default_tokens.push({ token: ':protocol', replacement: req.protocol });
default_tokens.push({ token: ':hostname', replacement: req.hostname });
default_tokens.push({ token: ':method', replacement: req.method });
default_tokens.push({ token: ':status', replacement: res.__statusCode || res.statusCode });
default_tokens.push({ token: ':response-time', replacement: res.responseTime });
default_tokens.push({ token: ':date', replacement: new Date().toUTCString() });
default_tokens.push({
token: ':referrer',
replacement: req.headers.referer || req.headers.referrer || ''
});
default_tokens.push({
token: ':http-version',
replacement: req.httpVersionMajor + '.' + req.httpVersionMinor
});
default_tokens.push({
token: ':remote-addr',
replacement:
req.headers['x-forwarded-for'] ||
req.ip ||
req._remoteAddress ||
(req.socket &&
(req.socket.remoteAddress ||
(req.socket.socket && req.socket.socket.remoteAddress)
)
)
}
);
default_tokens.push({ token: ':user-agent', replacement: req.headers['user-agent'] });
default_tokens.push({
token: ':content-length',
replacement:
(res._headers && res._headers['content-length']) ||
(res.__headers && res.__headers['Content-Length']) ||
'-'
}
);
default_tokens.push({ token: /:req\[([^\]]+)\]/g, replacement: function(_, field) {
return req.headers[field.toLowerCase()];
} });
default_tokens.push({ token: /:res\[([^\]]+)\]/g, replacement: function(_, field) {
return res._headers ?
(res._headers[field.toLowerCase()] || res.__headers[field])
: (res.__headers && res.__headers[field]);
} });

return array_unique_tokens(custom_tokens.concat(default_tokens));
}

/**
* Return request url path,
* adding this function prevents the Cyclomatic Complexity,
* for the assemble_tokens function at low, to pass the tests.
*
* @param {IncomingMessage} req
* @return {String}
* @api private
*/

function getUrl(req){
return req.originalUrl || req.url;
}
/**
* Return formatted log line.
*

@@ -116,33 +200,11 @@ function getLogger(logger4js, options) {
* @api private
*/

function format(str, req, res) {
|
||||
return str
|
||||
.replace(':url', req.originalUrl)
|
||||
.replace(':method', req.method)
|
||||
.replace(':status', res.__statusCode || res.statusCode)
|
||||
.replace(':response-time', res.responseTime)
|
||||
.replace(':date', new Date().toUTCString())
|
||||
.replace(':referrer', req.headers.referer || req.headers.referrer || '')
|
||||
.replace(':http-version', req.httpVersionMajor + '.' + req.httpVersionMinor)
|
||||
.replace(
|
||||
':remote-addr', req.ip || req._remoteAddress || (
|
||||
req.socket &&
|
||||
(req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress))
|
||||
))
|
||||
.replace(':user-agent', req.headers['user-agent'] || '')
|
||||
.replace(
|
||||
':content-length',
|
||||
(res._headers && res._headers['content-length']) ||
|
||||
(res.__headers && res.__headers['Content-Length']) ||
|
||||
'-'
|
||||
)
|
||||
.replace(/:req\[([^\]]+)\]/g, function(_, field){ return req.headers[field.toLowerCase()]; })
|
||||
.replace(/:res\[([^\]]+)\]/g, function(_, field){
|
||||
return res._headers ?
|
||||
(res._headers[field.toLowerCase()] || res.__headers[field])
|
||||
: (res.__headers && res.__headers[field]);
|
||||
});
|
||||
function format(str, tokens) {
|
||||
for (var i = 0; i < tokens.length; i++) {
|
||||
str = str.replace(tokens[i].token, tokens[i].replacement);
|
||||
}
|
||||
return str;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -158,9 +220,9 @@ function format(str, req, res) {
|
||||
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga
|
||||
* LOGGING http://example.com/hoge.agif
|
||||
* 1.2 in "\\.gif|\\.jpg$"
|
||||
* NOT LOGGING http://example.com/hoge.gif and
|
||||
* NOT LOGGING http://example.com/hoge.gif and
|
||||
* http://example.com/hoge.gif?fuga and http://example.com/hoge.jpg?fuga
|
||||
* LOGGING http://example.com/hoge.agif,
|
||||
* LOGGING http://example.com/hoge.agif,
|
||||
* http://example.com/hoge.ajpg and http://example.com/hoge.jpg?hoge
|
||||
* 1.3 in "\\.(gif|jpe?g|png)$"
|
||||
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.jpeg
|
||||
@ -178,15 +240,15 @@ function createNoLogCondition(nolog) {
|
||||
if (nolog) {
|
||||
if (nolog instanceof RegExp) {
|
||||
regexp = nolog;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (typeof nolog === 'string') {
|
||||
regexp = new RegExp(nolog);
|
||||
}
|
||||
|
||||
|
||||
if (Array.isArray(nolog)) {
|
||||
var regexpsAsStrings = nolog.map(
|
||||
function convertToStrings(o) {
|
||||
function convertToStrings(o) {
|
||||
return o.source ? o.source : o;
|
||||
}
|
||||
);
|
||||
|
||||
@ -11,7 +11,7 @@ function padWithZeros(vNumber, width) {
  }
  return numAsString;
}

function addZero(vNumber) {
  return padWithZeros(vNumber, 2);
}
@ -21,9 +21,9 @@ function addZero(vNumber) {
 * Thanks to http://www.svendtofte.com/code/date_format/
 * @private
 */
function offset(date) {
function offset(timezoneOffset) {
  // Difference to Greenwich time (GMT) in hours
  var os = Math.abs(date.getTimezoneOffset());
  var os = Math.abs(timezoneOffset);
  var h = String(Math.floor(os/60));
  var m = String(os%60);
  if (h.length == 1) {
@ -32,26 +32,33 @@ function offset(date) {
  if (m.length == 1) {
    m = "0" + m;
  }
  return date.getTimezoneOffset() < 0 ? "+"+h+m : "-"+h+m;
  return timezoneOffset < 0 ? "+"+h+m : "-"+h+m;
}

exports.asString = function(/*format,*/ date) {
exports.asString = function(/*format,*/ date, timezoneOffset) {
  /*jshint -W071 */
  var format = exports.ISO8601_FORMAT;
  if (typeof(date) === "string") {
    format = arguments[0];
    date = arguments[1];
    timezoneOffset = arguments[2];
  }

  var vDay = addZero(date.getDate());
  var vMonth = addZero(date.getMonth()+1);
  var vYearLong = addZero(date.getFullYear());
  var vYearShort = addZero(date.getFullYear().toString().substring(2,4));
  // make the date independent of the system timezone by working with UTC
  if (timezoneOffset === undefined) {
    timezoneOffset = date.getTimezoneOffset();
  }
  date.setUTCMinutes(date.getUTCMinutes() - timezoneOffset);
  var vDay = addZero(date.getUTCDate());
  var vMonth = addZero(date.getUTCMonth()+1);
  var vYearLong = addZero(date.getUTCFullYear());
  var vYearShort = addZero(date.getUTCFullYear().toString().substring(2,4));
  var vYear = (format.indexOf("yyyy") > -1 ? vYearLong : vYearShort);
  var vHour = addZero(date.getHours());
  var vMinute = addZero(date.getMinutes());
  var vSecond = addZero(date.getSeconds());
  var vMillisecond = padWithZeros(date.getMilliseconds(), 3);
  var vTimeZone = offset(date);
  var vHour = addZero(date.getUTCHours());
  var vMinute = addZero(date.getUTCMinutes());
  var vSecond = addZero(date.getUTCSeconds());
  var vMillisecond = padWithZeros(date.getUTCMilliseconds(), 3);
  var vTimeZone = offset(timezoneOffset);
  date.setUTCMinutes(date.getUTCMinutes() + timezoneOffset);
  var formatted = format
    .replace(/dd/g, vDay)
    .replace(/MM/g, vMonth)
@ -64,3 +71,4 @@ exports.asString = function(/*format,*/ date) {
  return formatted;

};
/*jshint +W071 */
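A small usage sketch (not part of the diff) of the `timezoneOffset` parameter introduced above. Offsets are in minutes west of UTC, as returned by `Date.prototype.getTimezoneOffset()`; the require path is assumed to point at the in-repo module.

```javascript
var dateFormat = require('./lib/date_format');

var when = new Date(Date.UTC(2010, 0, 11, 14, 31, 30, 5));

// Same instant rendered for UTC and for UTC-05:00 (300 minutes west of UTC).
console.log(dateFormat.asString(dateFormat.ISO8601_FORMAT, when, 0));   // 2010-01-11 14:31:30.005
console.log(dateFormat.asString(dateFormat.ISO8601_FORMAT, when, 300)); // 2010-01-11 09:31:30.005
```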
135
lib/layouts.js
135
lib/layouts.js
@ -3,31 +3,39 @@ var dateFormat = require('./date_format')
|
||||
, os = require('os')
|
||||
, eol = os.EOL || '\n'
|
||||
, util = require('util')
|
||||
, semver = require('semver')
|
||||
, replacementRegExp = /%[sdj]/g
|
||||
, layoutMakers = {
|
||||
"messagePassThrough": function() { return messagePassThroughLayout; },
|
||||
"basic": function() { return basicLayout; },
|
||||
"colored": function() { return colouredLayout; },
|
||||
"coloured": function() { return colouredLayout; },
|
||||
"messagePassThrough": function() { return messagePassThroughLayout; },
|
||||
"basic": function() { return basicLayout; },
|
||||
"colored": function() { return colouredLayout; },
|
||||
"coloured": function() { return colouredLayout; },
|
||||
"pattern": function (config) {
|
||||
return patternLayout(config && config.pattern, config && config.tokens);
|
||||
}
|
||||
},
|
||||
"dummy": function() { return dummyLayout; }
|
||||
}
|
||||
, colours = {
|
||||
ALL: "grey",
|
||||
TRACE: "blue",
|
||||
DEBUG: "cyan",
|
||||
INFO: "green",
|
||||
WARN: "yellow",
|
||||
ERROR: "red",
|
||||
FATAL: "magenta",
|
||||
ALL: "grey",
|
||||
TRACE: "blue",
|
||||
DEBUG: "cyan",
|
||||
INFO: "green",
|
||||
WARN: "yellow",
|
||||
ERROR: "red",
|
||||
FATAL: "magenta",
|
||||
OFF: "grey"
|
||||
};
|
||||
|
||||
function wrapErrorsWithInspect(items) {
|
||||
return items.map(function(item) {
|
||||
if ((item instanceof Error) && item.stack) {
|
||||
return { inspect: function() { return util.format(item) + '\n' + item.stack; } };
|
||||
return { inspect: function() {
|
||||
if (semver.satisfies(process.version, '>=6')) {
|
||||
return util.format(item);
|
||||
} else {
|
||||
return util.format(item) + '\n' + item.stack;
|
||||
}
|
||||
} };
|
||||
} else {
|
||||
return item;
|
||||
}
|
||||
@ -71,11 +79,11 @@ function colorize (str, style) {
|
||||
return colorizeStart(style) + str + colorizeEnd(style);
|
||||
}
|
||||
|
||||
function timestampLevelAndCategory(loggingEvent, colour) {
|
||||
function timestampLevelAndCategory(loggingEvent, colour, timezoneOffest) {
|
||||
var output = colorize(
|
||||
formatLogData(
|
||||
'[%s] [%s] %s - '
|
||||
, dateFormat.asString(loggingEvent.startTime)
|
||||
, dateFormat.asString(loggingEvent.startTime, timezoneOffest)
|
||||
, loggingEvent.level
|
||||
, loggingEvent.categoryName
|
||||
)
|
||||
@ -93,18 +101,23 @@ function timestampLevelAndCategory(loggingEvent, colour) {
|
||||
*
|
||||
* @author Stephan Strittmatter
|
||||
*/
|
||||
function basicLayout (loggingEvent) {
|
||||
return timestampLevelAndCategory(loggingEvent) + formatLogData(loggingEvent.data);
|
||||
function basicLayout (loggingEvent, timezoneOffset) {
|
||||
return timestampLevelAndCategory(
|
||||
loggingEvent,
|
||||
undefined,
|
||||
timezoneOffset
|
||||
) + formatLogData(loggingEvent.data);
|
||||
}
|
||||
|
||||
/**
|
||||
* colouredLayout - taken from masylum's fork.
|
||||
* same as basicLayout, but with colours.
|
||||
*/
|
||||
function colouredLayout (loggingEvent) {
|
||||
function colouredLayout (loggingEvent, timezoneOffset) {
|
||||
return timestampLevelAndCategory(
|
||||
loggingEvent,
|
||||
colours[loggingEvent.level.toString()]
|
||||
colours[loggingEvent.level.toString()],
|
||||
timezoneOffset
|
||||
) + formatLogData(loggingEvent.data);
|
||||
}
|
||||
|
||||
@ -112,6 +125,10 @@ function messagePassThroughLayout (loggingEvent) {
|
||||
return formatLogData(loggingEvent.data);
|
||||
}
|
||||
|
||||
function dummyLayout(loggingEvent) {
|
||||
return loggingEvent.data[0];
|
||||
}
|
||||
|
||||
/**
|
||||
* PatternLayout
|
||||
* Format for specifiers is %[padding].[truncation][field]{[format]}
|
||||
@ -129,7 +146,7 @@ function messagePassThroughLayout (loggingEvent) {
|
||||
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
|
||||
* You can use %[ and %] to define a colored block.
|
||||
*
|
||||
* Tokens are specified as simple key:value objects.
|
||||
* Tokens are specified as simple key:value objects.
|
||||
* The key represents the token name whereas the value can be a string or function
|
||||
* which is called to extract the value to put in the log message. If token is not
|
||||
* found, it doesn't replace the field.
|
||||
@ -139,14 +156,16 @@ function messagePassThroughLayout (loggingEvent) {
|
||||
* Takes a pattern string, array of tokens and returns a layout function.
|
||||
* @param {String} Log format pattern String
|
||||
* @param {object} map object of different tokens
|
||||
* @param {number} timezone offset in minutes
|
||||
* @return {Function}
|
||||
* @author Stephan Strittmatter
|
||||
* @author Jan Schmidle
|
||||
*/
|
||||
function patternLayout (pattern, tokens) {
|
||||
function patternLayout (pattern, tokens, timezoneOffset) {
|
||||
// jshint maxstatements:22
|
||||
var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
|
||||
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdhmnprzx%])(\{([^\}]+)\})?|([^%]+)/;
|
||||
|
||||
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdhmnprzxy%])(\{([^\}]+)\})?|([^%]+)/;
|
||||
|
||||
pattern = pattern || TTCC_CONVERSION_PATTERN;
|
||||
|
||||
function categoryName(loggingEvent, specifier) {
|
||||
@ -169,7 +188,7 @@ function patternLayout (pattern, tokens) {
|
||||
if (format == "ISO8601") {
|
||||
format = dateFormat.ISO8601_FORMAT;
|
||||
} else if (format == "ISO8601_WITH_TZ_OFFSET") {
|
||||
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
|
||||
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
|
||||
} else if (format == "ABSOLUTE") {
|
||||
format = dateFormat.ABSOLUTETIME_FORMAT;
|
||||
} else if (format == "DATE") {
|
||||
@ -177,9 +196,9 @@ function patternLayout (pattern, tokens) {
|
||||
}
|
||||
}
|
||||
// Format the date
|
||||
return dateFormat.asString(format, loggingEvent.startTime);
|
||||
return dateFormat.asString(format, loggingEvent.startTime, timezoneOffset);
|
||||
}
|
||||
|
||||
|
||||
function hostname() {
|
||||
return os.hostname().toString();
|
||||
}
|
||||
@ -187,7 +206,7 @@ function patternLayout (pattern, tokens) {
|
||||
function formatMessage(loggingEvent) {
|
||||
return formatLogData(loggingEvent.data);
|
||||
}
|
||||
|
||||
|
||||
function endOfLine() {
|
||||
return eol;
|
||||
}
|
||||
@ -197,7 +216,7 @@ function patternLayout (pattern, tokens) {
|
||||
}
|
||||
|
||||
function startTime(loggingEvent) {
|
||||
return "" + loggingEvent.startTime.toLocaleTimeString();
|
||||
return dateFormat.asString('hh:mm:ss', loggingEvent.startTime, timezoneOffset);
|
||||
}
|
||||
|
||||
function startColour(loggingEvent) {
|
||||
@ -212,8 +231,25 @@ function patternLayout (pattern, tokens) {
|
||||
return '%';
|
||||
}
|
||||
|
||||
function pid() {
|
||||
return process.pid;
|
||||
function pid(loggingEvent) {
|
||||
if (loggingEvent && loggingEvent.pid) {
|
||||
return loggingEvent.pid;
|
||||
} else {
|
||||
return process.pid;
|
||||
}
|
||||
}
|
||||
|
||||
function clusterInfo(loggingEvent, specifier) {
|
||||
if (loggingEvent.cluster && specifier) {
|
||||
return specifier
|
||||
.replace('%m', loggingEvent.cluster.master)
|
||||
.replace('%w', loggingEvent.cluster.worker)
|
||||
.replace('%i', loggingEvent.cluster.workerId);
|
||||
} else if (loggingEvent.cluster) {
|
||||
return loggingEvent.cluster.worker+'@'+loggingEvent.cluster.master;
|
||||
} else {
|
||||
return pid();
|
||||
}
|
||||
}
|
||||
|
||||
function userDefined(loggingEvent, specifier) {
|
||||
@ -237,6 +273,7 @@ function patternLayout (pattern, tokens) {
|
||||
'r': startTime,
|
||||
'[': startColour,
|
||||
']': endColour,
|
||||
'y': clusterInfo,
|
||||
'z': pid,
|
||||
'%': percent,
|
||||
'x': userDefined
|
||||
@ -275,12 +312,19 @@ function patternLayout (pattern, tokens) {
|
||||
}
|
||||
return toPad;
|
||||
}
|
||||
|
||||
|
||||
function truncateAndPad(toTruncAndPad, truncation, padding) {
|
||||
var replacement = toTruncAndPad;
|
||||
replacement = truncate(truncation, replacement);
|
||||
replacement = pad(padding, replacement);
|
||||
return replacement;
|
||||
}
|
||||
|
||||
return function(loggingEvent) {
|
||||
var formattedString = "";
|
||||
var result;
|
||||
var searchString = pattern;
|
||||
|
||||
|
||||
while ((result = regex.exec(searchString))) {
|
||||
var matchedString = result[0];
|
||||
var padding = result[1];
|
||||
@ -288,22 +332,15 @@ function patternLayout (pattern, tokens) {
|
||||
var conversionCharacter = result[3];
|
||||
var specifier = result[5];
|
||||
var text = result[6];
|
||||
|
||||
|
||||
// Check if the pattern matched was just normal text
|
||||
if (text) {
|
||||
formattedString += "" + text;
|
||||
} else {
|
||||
// Create a raw replacement string based on the conversion
|
||||
// character and specifier
|
||||
var replacement =
|
||||
replaceToken(conversionCharacter, loggingEvent, specifier) ||
|
||||
matchedString;
|
||||
|
||||
// Format the replacement according to any padding or
|
||||
// truncation specified
|
||||
replacement = truncate(truncation, replacement);
|
||||
replacement = pad(padding, replacement);
|
||||
formattedString += replacement;
|
||||
var replacement = replaceToken(conversionCharacter, loggingEvent, specifier);
|
||||
formattedString += truncateAndPad(replacement, truncation, padding);
|
||||
}
|
||||
searchString = searchString.substr(result.index + result[0].length);
|
||||
}
|
||||
@ -313,11 +350,15 @@ function patternLayout (pattern, tokens) {
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
basicLayout: basicLayout,
|
||||
messagePassThroughLayout: messagePassThroughLayout,
|
||||
patternLayout: patternLayout,
|
||||
colouredLayout: colouredLayout,
|
||||
coloredLayout: colouredLayout,
|
||||
basicLayout: basicLayout,
|
||||
messagePassThroughLayout: messagePassThroughLayout,
|
||||
patternLayout: patternLayout,
|
||||
colouredLayout: colouredLayout,
|
||||
coloredLayout: colouredLayout,
|
||||
dummyLayout: dummyLayout,
|
||||
addLayout: function(name, serializerGenerator) {
|
||||
layoutMakers[name] = serializerGenerator;
|
||||
},
|
||||
layout: function(name, config) {
|
||||
return layoutMakers[name] && layoutMakers[name](config);
|
||||
}
|
||||
|
||||
@ -13,20 +13,16 @@ function Level(level, levelStr) {
 * @type Log4js.Level
 */
function toLevel(sArg, defaultLevel) {

  if (!sArg) {
    return defaultLevel;
  }

  if (typeof sArg == "string") {
    var s = sArg.toUpperCase();
    if (module.exports[s]) {
      return module.exports[s];
    } else {
      return defaultLevel;
    }
  if (sArg instanceof Level) {
    module.exports[sArg.toString()] = sArg;
    return sArg;
  }
  if (typeof sArg === "string") {
    return module.exports[sArg.toUpperCase()] || defaultLevel;
  }

  return toLevel(sArg.toString());
}

@ -49,20 +45,22 @@ Level.prototype.isGreaterThanOrEqualTo = function(otherLevel) {
};

Level.prototype.isEqualTo = function(otherLevel) {
  if (typeof otherLevel == "string") {
  if (typeof otherLevel === "string") {
    otherLevel = toLevel(otherLevel);
  }
  return this.level === otherLevel.level;
};

module.exports = {
  ALL: new Level(Number.MIN_VALUE, "ALL"),
  TRACE: new Level(5000, "TRACE"),
  DEBUG: new Level(10000, "DEBUG"),
  INFO: new Level(20000, "INFO"),
  WARN: new Level(30000, "WARN"),
  ERROR: new Level(40000, "ERROR"),
  FATAL: new Level(50000, "FATAL"),
  OFF: new Level(Number.MAX_VALUE, "OFF"),
  toLevel: toLevel
  ALL: new Level(Number.MIN_VALUE, "ALL"),
  TRACE: new Level(5000, "TRACE"),
  DEBUG: new Level(10000, "DEBUG"),
  INFO: new Level(20000, "INFO"),
  WARN: new Level(30000, "WARN"),
  ERROR: new Level(40000, "ERROR"),
  FATAL: new Level(50000, "FATAL"),
  MARK: new Level(9007199254740992, "MARK"), // 2^53
  OFF: new Level(Number.MAX_VALUE, "OFF"),
  toLevel: toLevel,
  Level: Level
};
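An illustrative sketch (not part of the diff) of the additions to lib/levels.js: the MARK level, the exported Level constructor, and toLevel now accepting a Level instance and registering it by name. The require path is assumed.

```javascript
var levels = require('./lib/levels');

console.log(levels.toLevel('mark') === levels.MARK);           // true
console.log(levels.MARK.isGreaterThanOrEqualTo(levels.FATAL)); // true

// toLevel registers and returns custom Level instances.
var audit = new levels.Level(25000, 'AUDIT');
console.log(levels.toLevel(audit) === audit);                  // true
console.log(levels.toLevel('audit') === audit);                // true
```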
174
lib/log4js.js
174
lib/log4js.js
@ -44,7 +44,6 @@
|
||||
* Website: http://log4js.berlios.de
|
||||
*/
|
||||
var events = require('events')
|
||||
, async = require('async')
|
||||
, fs = require('fs')
|
||||
, path = require('path')
|
||||
, util = require('util')
|
||||
@ -65,10 +64,30 @@ var events = require('events')
|
||||
replaceConsole: false
|
||||
};
|
||||
|
||||
require('./appenders/console');
|
||||
|
||||
function hasLogger(logger) {
|
||||
return loggers.hasOwnProperty(logger);
|
||||
}
|
||||
|
||||
levels.forName = function(levelStr, levelVal) {
|
||||
var level;
|
||||
if (typeof levelStr === "string" && typeof levelVal === "number") {
|
||||
var levelUpper = levelStr.toUpperCase();
|
||||
level = new levels.Level(levelVal, levelUpper);
|
||||
loggerModule.addLevelMethods(level);
|
||||
}
|
||||
return level;
|
||||
};
|
||||
|
||||
levels.getLevel = function(levelStr) {
|
||||
var level;
|
||||
if (typeof levelStr === "string") {
|
||||
var levelUpper = levelStr.toUpperCase();
|
||||
level = levels.toLevel(levelStr);
|
||||
}
|
||||
return level;
|
||||
};
|
||||
|
||||
function getBufferedLogger(categoryName) {
|
||||
var base_logger = getLogger(categoryName);
|
||||
@ -92,6 +111,22 @@ function getBufferedLogger(categoryName) {
|
||||
return logger;
|
||||
}
|
||||
|
||||
function normalizeCategory (category) {
|
||||
return category + '.';
|
||||
}
|
||||
|
||||
function doesLevelEntryContainsLogger (levelCategory, loggerCategory) {
|
||||
var normalizedLevelCategory = normalizeCategory(levelCategory);
|
||||
var normalizedLoggerCategory = normalizeCategory(loggerCategory);
|
||||
return normalizedLoggerCategory.substring(0, normalizedLevelCategory.length) == normalizedLevelCategory; //jshint ignore:line
|
||||
}
|
||||
|
||||
function doesAppenderContainsLogger (appenderCategory, loggerCategory) {
|
||||
var normalizedAppenderCategory = normalizeCategory(appenderCategory);
|
||||
var normalizedLoggerCategory = normalizeCategory(loggerCategory);
|
||||
return normalizedLoggerCategory.substring(0, normalizedAppenderCategory.length) == normalizedAppenderCategory; //jshint ignore:line
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Get a logger instance. Instance is cached on categoryName level.
|
||||
@ -99,32 +134,57 @@ function getBufferedLogger(categoryName) {
|
||||
* @return {Logger} instance of logger for the category
|
||||
* @static
|
||||
*/
|
||||
function getLogger (categoryName) {
|
||||
function getLogger (loggerCategoryName) {
|
||||
|
||||
// Use default logger if categoryName is not specified or invalid
|
||||
if (typeof categoryName !== "string") {
|
||||
categoryName = Logger.DEFAULT_CATEGORY;
|
||||
if (typeof loggerCategoryName !== "string") {
|
||||
loggerCategoryName = Logger.DEFAULT_CATEGORY;
|
||||
}
|
||||
|
||||
var appenderList;
|
||||
if (!hasLogger(categoryName)) {
|
||||
// Create the logger for this name if it doesn't already exist
|
||||
loggers[categoryName] = new Logger(categoryName);
|
||||
if (appenders[categoryName]) {
|
||||
appenderList = appenders[categoryName];
|
||||
appenderList.forEach(function(appender) {
|
||||
loggers[categoryName].addListener("log", appender);
|
||||
});
|
||||
if (!hasLogger(loggerCategoryName)) {
|
||||
|
||||
var level;
|
||||
|
||||
/* jshint -W073 */
|
||||
// If there's a "levels" entry in the configuration
|
||||
if (levels.config) {
|
||||
// Goes through the categories in the levels configuration entry,
|
||||
// starting with the "higher" ones.
|
||||
var keys = Object.keys(levels.config).sort();
|
||||
for (var idx = 0; idx < keys.length; idx++) {
|
||||
var levelCategory = keys[idx];
|
||||
if (doesLevelEntryContainsLogger(levelCategory, loggerCategoryName)) {
|
||||
// level for the logger
|
||||
level = levels.config[levelCategory];
|
||||
}
|
||||
}
|
||||
}
|
||||
/* jshint +W073 */
|
||||
|
||||
// Create the logger for this name if it doesn't already exist
|
||||
loggers[loggerCategoryName] = new Logger(loggerCategoryName, level);
|
||||
|
||||
/* jshint -W083 */
|
||||
var appenderList;
|
||||
for(var appenderCategory in appenders) {
|
||||
if (doesAppenderContainsLogger(appenderCategory, loggerCategoryName)) {
|
||||
appenderList = appenders[appenderCategory];
|
||||
appenderList.forEach(function(appender) {
|
||||
loggers[loggerCategoryName].addListener("log", appender);
|
||||
});
|
||||
}
|
||||
}
|
||||
/* jshint +W083 */
|
||||
|
||||
if (appenders[ALL_CATEGORIES]) {
|
||||
appenderList = appenders[ALL_CATEGORIES];
|
||||
appenderList.forEach(function(appender) {
|
||||
loggers[categoryName].addListener("log", appender);
|
||||
loggers[loggerCategoryName].addListener("log", appender);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return loggers[categoryName];
|
||||
return loggers[loggerCategoryName];
|
||||
}
|
||||
|
||||
/**
|
||||
@ -141,13 +201,19 @@ function addAppender () {
|
||||
args = args[0];
|
||||
}
|
||||
|
||||
args.forEach(function(category) {
|
||||
addAppenderToCategory(appender, category);
|
||||
args.forEach(function(appenderCategory) {
|
||||
addAppenderToCategory(appender, appenderCategory);
|
||||
|
||||
if (category === ALL_CATEGORIES) {
|
||||
if (appenderCategory === ALL_CATEGORIES) {
|
||||
addAppenderToAllLoggers(appender);
|
||||
} else if (hasLogger(category)) {
|
||||
loggers[category].addListener("log", appender);
|
||||
} else {
|
||||
|
||||
for(var loggerCategory in loggers) {
|
||||
if (doesAppenderContainsLogger(appenderCategory,loggerCategory)) {
|
||||
loggers[loggerCategory].addListener("log", appender);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
});
|
||||
}
|
||||
@ -193,15 +259,22 @@ function configureAppenders(appenderList, options) {
|
||||
}
|
||||
}
|
||||
|
||||
function configureLevels(levels) {
|
||||
if (levels) {
|
||||
for (var category in levels) {
|
||||
if (levels.hasOwnProperty(category)) {
|
||||
if(category === ALL_CATEGORIES) {
|
||||
setGlobalLogLevel(levels[category]);
|
||||
}
|
||||
getLogger(category).setLevel(levels[category]);
|
||||
function configureLevels(_levels) {
|
||||
levels.config = _levels; // Keep it so we can create loggers later using this cfg
|
||||
if (_levels) {
|
||||
var keys = Object.keys(levels.config).sort();
|
||||
for (var idx in keys) {
|
||||
var category = keys[idx];
|
||||
if(category === ALL_CATEGORIES) {
|
||||
setGlobalLogLevel(_levels[category]);
|
||||
}
|
||||
/* jshint -W073 */
|
||||
for(var loggerCategory in loggers) {
|
||||
if (doesLevelEntryContainsLogger(category, loggerCategory)) {
|
||||
loggers[loggerCategory].setLevel(_levels[category]);
|
||||
}
|
||||
}
|
||||
/* jshint +W073 */
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -231,8 +304,8 @@ function loadConfigurationFile(filename) {
|
||||
function configureOnceOff(config, options) {
|
||||
if (config) {
|
||||
try {
|
||||
configureAppenders(config.appenders, options);
|
||||
configureLevels(config.levels);
|
||||
configureAppenders(config.appenders, options);
|
||||
|
||||
if (config.replaceConsole) {
|
||||
replaceConsole();
|
||||
@ -248,12 +321,12 @@ function configureOnceOff(config, options) {
|
||||
}
|
||||
}
|
||||
|
||||
function reloadConfiguration() {
|
||||
function reloadConfiguration(options) {
|
||||
var mtime = getMTime(configState.filename);
|
||||
if (!mtime) return;
|
||||
|
||||
|
||||
if (configState.lastMTime && (mtime.getTime() > configState.lastMTime.getTime())) {
|
||||
configureOnceOff(loadConfigurationFile(configState.filename));
|
||||
configureOnceOff(loadConfigurationFile(configState.filename), options);
|
||||
}
|
||||
configState.lastMTime = mtime;
|
||||
}
|
||||
@ -275,7 +348,7 @@ function initReloadConfiguration(filename, options) {
|
||||
}
|
||||
configState.filename = filename;
|
||||
configState.lastMTime = getMTime(filename);
|
||||
configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000);
|
||||
configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000, options);
|
||||
}
|
||||
|
||||
function configure(configurationFileOrObject, options) {
|
||||
@ -381,20 +454,26 @@ function shutdown(cb) {
|
||||
// not being able to be drained because of run-away log writes.
|
||||
loggerModule.disableAllLogWrites();
|
||||
|
||||
// Next, get all the shutdown functions for appenders as an array.
|
||||
var shutdownFunctions = Object.keys(appenderShutdowns).reduce(
|
||||
function(accum, category) {
|
||||
return accum.concat(appenderShutdowns[category]);
|
||||
}, []);
|
||||
|
||||
// Call each of the shutdown functions.
|
||||
async.each(
|
||||
shutdownFunctions,
|
||||
function(shutdownFn, done) {
|
||||
shutdownFn(done);
|
||||
},
|
||||
cb
|
||||
);
|
||||
// Call each of the shutdown functions in parallel
|
||||
var completed = 0;
|
||||
var error;
|
||||
var shutdownFcts = [];
|
||||
var complete = function(err) {
|
||||
error = error || err;
|
||||
completed++;
|
||||
if (completed >= shutdownFcts.length) {
|
||||
cb(error);
|
||||
}
|
||||
};
|
||||
for (var category in appenderShutdowns) {
|
||||
if (appenderShutdowns.hasOwnProperty(category)) {
|
||||
shutdownFcts.push(appenderShutdowns[category]);
|
||||
}
|
||||
}
|
||||
if (!shutdownFcts.length) {
|
||||
return cb();
|
||||
}
|
||||
shutdownFcts.forEach(function(shutdownFct) { shutdownFct(complete); });
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
@ -423,4 +502,3 @@ module.exports = {
|
||||
|
||||
//set ourselves up
|
||||
configure();
|
||||
|
||||
|
||||
@ -32,7 +32,7 @@ function LoggingEvent (categoryName, level, data, logger) {
|
||||
*/
|
||||
function Logger (name, level) {
|
||||
this.category = name || DEFAULT_CATEGORY;
|
||||
|
||||
|
||||
if (level) {
|
||||
this.setLevel(level);
|
||||
}
|
||||
@ -50,36 +50,56 @@ Logger.prototype.removeLevel = function() {
|
||||
};
|
||||
|
||||
Logger.prototype.log = function() {
|
||||
var args = Array.prototype.slice.call(arguments)
|
||||
, logLevel = levels.toLevel(args.shift())
|
||||
, loggingEvent;
|
||||
if (this.isLevelEnabled(logLevel)) {
|
||||
loggingEvent = new LoggingEvent(this.category, logLevel, args, this);
|
||||
this.emit("log", loggingEvent);
|
||||
var logLevel = levels.toLevel(arguments[0], levels.INFO);
|
||||
if (!this.isLevelEnabled(logLevel)) {
|
||||
return;
|
||||
}
|
||||
var numArgs = arguments.length - 1;
|
||||
var args = new Array(numArgs);
|
||||
for (var i = 0; i < numArgs; i++) {
|
||||
args[i] = arguments[i + 1];
|
||||
}
|
||||
this._log(logLevel, args);
|
||||
};
|
||||
|
||||
Logger.prototype.isLevelEnabled = function(otherLevel) {
|
||||
return this.level.isLessThanOrEqualTo(otherLevel);
|
||||
};
|
||||
|
||||
['Trace','Debug','Info','Warn','Error','Fatal'].forEach(
|
||||
['Trace','Debug','Info','Warn','Error','Fatal', 'Mark'].forEach(
|
||||
function(levelString) {
|
||||
var level = levels.toLevel(levelString);
|
||||
Logger.prototype['is'+levelString+'Enabled'] = function() {
|
||||
return this.isLevelEnabled(level);
|
||||
};
|
||||
|
||||
Logger.prototype[levelString.toLowerCase()] = function () {
|
||||
if (logWritesEnabled && this.isLevelEnabled(level)) {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
args.unshift(level);
|
||||
Logger.prototype.log.apply(this, args);
|
||||
}
|
||||
};
|
||||
addLevelMethods(levelString);
|
||||
}
|
||||
);
|
||||
|
||||
function addLevelMethods(level) {
|
||||
level = levels.toLevel(level);
|
||||
|
||||
var levelStrLower = level.toString().toLowerCase();
|
||||
var levelMethod = levelStrLower.replace(/_([a-z])/g, function(g) { return g[1].toUpperCase(); } );
|
||||
var isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1);
|
||||
|
||||
Logger.prototype['is'+isLevelMethod+'Enabled'] = function() {
|
||||
return this.isLevelEnabled(level.toString());
|
||||
};
|
||||
|
||||
Logger.prototype[levelMethod] = function () {
|
||||
if (logWritesEnabled && this.isLevelEnabled(level)) {
|
||||
var numArgs = arguments.length;
|
||||
var args = new Array(numArgs);
|
||||
for (var i = 0; i < numArgs; i++) {
|
||||
args[i] = arguments[i];
|
||||
}
|
||||
this._log(level, args);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Logger.prototype._log = function(level, data) {
|
||||
var loggingEvent = new LoggingEvent(this.category, level, data, this);
|
||||
this.emit('log', loggingEvent);
|
||||
};
|
||||
|
||||
/**
|
||||
* Disable all log writes.
|
||||
* @returns {void}
|
||||
@ -100,3 +120,4 @@ exports.LoggingEvent = LoggingEvent;
|
||||
exports.Logger = Logger;
|
||||
exports.disableAllLogWrites = disableAllLogWrites;
|
||||
exports.enableAllLogWrites = enableAllLogWrites;
|
||||
exports.addLevelMethods = addLevelMethods;
|
||||
@ -16,7 +16,11 @@ module.exports = BaseRollingFileStream;
function BaseRollingFileStream(filename, options) {
  debug("In BaseRollingFileStream");
  this.filename = filename;
  this.options = options || { encoding: 'utf8', mode: parseInt('0644', 8), flags: 'a' };
  this.options = options || {};
  this.options.encoding = this.options.encoding || 'utf8';
  this.options.mode = this.options.mode || parseInt('0644', 8);
  this.options.flags = this.options.flags || 'a';

  this.currentSize = 0;

function currentFileSize(file) {
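A sketch (not part of the diff) of what the change above buys: option objects are now merged with the defaults field by field, so supplying only one option no longer discards the default encoding, mode, and flags. The require path and the numeric mode in the comment are assumptions.

```javascript
var BaseRollingFileStream = require('./lib/streams/BaseRollingFileStream');

// Only `flags` is supplied; encoding and mode fall back to their defaults.
var stream = new BaseRollingFileStream('app.log', { flags: 'w' });
// stream.options ends up as { flags: 'w', encoding: 'utf8', mode: 420 } (420 === 0644 octal)
```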
@ -2,12 +2,15 @@
|
||||
var BaseRollingFileStream = require('./BaseRollingFileStream')
|
||||
, debug = require('../debug')('DateRollingFileStream')
|
||||
, format = require('../date_format')
|
||||
, async = require('async')
|
||||
, fs = require('fs')
|
||||
, util = require('util');
|
||||
|
||||
module.exports = DateRollingFileStream;
|
||||
|
||||
function findTimestampFromFileIfExists(filename, now) {
|
||||
return fs.existsSync(filename) ? fs.statSync(filename).mtime : new Date(now());
|
||||
}
|
||||
|
||||
function DateRollingFileStream(filename, pattern, options, now) {
|
||||
debug("Now is " + now);
|
||||
if (pattern && typeof(pattern) === 'object') {
|
||||
@ -17,29 +20,26 @@ function DateRollingFileStream(filename, pattern, options, now) {
|
||||
}
|
||||
this.pattern = pattern || '.yyyy-MM-dd';
|
||||
this.now = now || Date.now;
|
||||
|
||||
if (fs.existsSync(filename)) {
|
||||
var stat = fs.statSync(filename);
|
||||
this.lastTimeWeWroteSomething = format.asString(this.pattern, stat.mtime);
|
||||
} else {
|
||||
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
|
||||
}
|
||||
this.lastTimeWeWroteSomething = format.asString(
|
||||
this.pattern,
|
||||
findTimestampFromFileIfExists(filename, this.now)
|
||||
);
|
||||
|
||||
this.baseFilename = filename;
|
||||
this.alwaysIncludePattern = false;
|
||||
|
||||
|
||||
if (options) {
|
||||
if (options.alwaysIncludePattern) {
|
||||
this.alwaysIncludePattern = true;
|
||||
filename = this.baseFilename + this.lastTimeWeWroteSomething;
|
||||
}
|
||||
delete options.alwaysIncludePattern;
|
||||
if (Object.keys(options).length === 0) {
|
||||
options = null;
|
||||
if (Object.keys(options).length === 0) {
|
||||
options = null;
|
||||
}
|
||||
}
|
||||
debug("this.now is " + this.now + ", now is " + now);
|
||||
|
||||
|
||||
DateRollingFileStream.super_.call(this, filename, options);
|
||||
}
|
||||
util.inherits(DateRollingFileStream, BaseRollingFileStream);
|
||||
@ -47,37 +47,33 @@ util.inherits(DateRollingFileStream, BaseRollingFileStream);
|
||||
DateRollingFileStream.prototype.shouldRoll = function() {
|
||||
var lastTime = this.lastTimeWeWroteSomething,
|
||||
thisTime = format.asString(this.pattern, new Date(this.now()));
|
||||
|
||||
debug("DateRollingFileStream.shouldRoll with now = " +
|
||||
|
||||
debug("DateRollingFileStream.shouldRoll with now = " +
|
||||
this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime);
|
||||
|
||||
|
||||
this.lastTimeWeWroteSomething = thisTime;
|
||||
this.previousTime = lastTime;
|
||||
|
||||
|
||||
return thisTime !== lastTime;
|
||||
};
|
||||
|
||||
DateRollingFileStream.prototype.roll = function(filename, callback) {
|
||||
var that = this;
|
||||
|
||||
|
||||
debug("Starting roll");
|
||||
|
||||
|
||||
if (this.alwaysIncludePattern) {
|
||||
this.filename = this.baseFilename + this.lastTimeWeWroteSomething;
|
||||
async.series([
|
||||
this.closeTheStream.bind(this),
|
||||
this.openTheStream.bind(this)
|
||||
], callback);
|
||||
this.closeTheStream(this.openTheStream.bind(this, callback));
|
||||
} else {
|
||||
var newFilename = this.baseFilename + this.previousTime;
|
||||
async.series([
|
||||
this.closeTheStream.bind(this),
|
||||
deleteAnyExistingFile,
|
||||
renameTheCurrentFile,
|
||||
this.openTheStream.bind(this)
|
||||
], callback);
|
||||
this.closeTheStream(
|
||||
deleteAnyExistingFile.bind(null,
|
||||
renameTheCurrentFile.bind(null,
|
||||
this.openTheStream.bind(this,
|
||||
callback))));
|
||||
}
|
||||
|
||||
|
||||
function deleteAnyExistingFile(cb) {
|
||||
//on windows, you can get a EEXIST error if you rename a file to an existing file
|
||||
//so, we'll try to delete the file we're renaming to first
|
||||
|
||||
@ -3,8 +3,9 @@ var BaseRollingFileStream = require('./BaseRollingFileStream')
|
||||
, debug = require('../debug')('RollingFileStream')
|
||||
, util = require('util')
|
||||
, path = require('path')
|
||||
, fs = require('fs')
|
||||
, async = require('async');
|
||||
, child_process = require('child_process')
|
||||
, zlib = require("zlib")
|
||||
, fs = require('fs');
|
||||
|
||||
module.exports = RollingFileStream;
|
||||
|
||||
@ -25,7 +26,7 @@ function RollingFileStream (filename, size, backups, options) {
|
||||
util.inherits(RollingFileStream, BaseRollingFileStream);
|
||||
|
||||
RollingFileStream.prototype.shouldRoll = function() {
|
||||
debug("should roll with current size %d, and max size %d", this.currentSize, this.size);
|
||||
debug("should roll with current size " + this.currentSize + " and max size " + this.size);
|
||||
return this.currentSize >= this.size;
|
||||
};
|
||||
|
||||
@ -38,6 +39,7 @@ RollingFileStream.prototype.roll = function(filename, callback) {
|
||||
}
|
||||
|
||||
function index(filename_) {
|
||||
debug('Calculating index of '+filename_);
|
||||
return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
|
||||
}
|
||||
|
||||
@ -51,16 +53,42 @@ RollingFileStream.prototype.roll = function(filename, callback) {
|
||||
}
|
||||
}
|
||||
|
||||
function compress (filename, cb) {
|
||||
|
||||
var gzip = zlib.createGzip();
|
||||
var inp = fs.createReadStream(filename);
|
||||
var out = fs.createWriteStream(filename+".gz");
|
||||
inp.pipe(gzip).pipe(out);
|
||||
fs.unlink(filename, cb);
|
||||
|
||||
}
|
||||
|
||||
function increaseFileIndex (fileToRename, cb) {
|
||||
var idx = index(fileToRename);
|
||||
debug('Index of ' + fileToRename + ' is ' + idx);
|
||||
if (idx < that.backups) {
|
||||
|
||||
var ext = path.extname(fileToRename);
|
||||
var destination = filename + '.' + (idx+1);
|
||||
if (that.options.compress && /^gz$/.test(ext.substring(1))) {
|
||||
destination+=ext;
|
||||
}
|
||||
//on windows, you can get a EEXIST error if you rename a file to an existing file
|
||||
//so, we'll try to delete the file we're renaming to first
|
||||
fs.unlink(filename + '.' + (idx+1), function (err) {
|
||||
fs.unlink(destination, function (err) {
|
||||
//ignore err: if we could not delete, it's most likely that it doesn't exist
|
||||
debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
|
||||
fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1), cb);
|
||||
debug('Renaming ' + fileToRename + ' -> ' + destination);
|
||||
fs.rename(path.join(path.dirname(filename), fileToRename), destination, function(err) {
|
||||
if (err) {
|
||||
cb(err);
|
||||
} else {
|
||||
if (that.options.compress && ext!=".gz") {
|
||||
compress(destination, cb);
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
} else {
|
||||
cb();
|
||||
@ -71,19 +99,19 @@ RollingFileStream.prototype.roll = function(filename, callback) {
|
||||
//roll the backups (rename file.n to file.n+1, where n <= numBackups)
|
||||
debug("Renaming the old files");
|
||||
fs.readdir(path.dirname(filename), function (err, files) {
|
||||
async.eachSeries(
|
||||
files.filter(justTheseFiles).sort(byIndex).reverse(),
|
||||
increaseFileIndex,
|
||||
cb
|
||||
);
|
||||
var filesToProcess = files.filter(justTheseFiles).sort(byIndex);
|
||||
(function processOne(err) {
|
||||
var file = filesToProcess.pop();
|
||||
if (!file || err) { return cb(err); }
|
||||
increaseFileIndex(file, processOne);
|
||||
})();
|
||||
});
|
||||
}
|
||||
|
||||
debug("Rolling, rolling, rolling");
|
||||
async.series([
|
||||
this.closeTheStream.bind(this),
|
||||
renameTheFiles,
|
||||
this.openTheStream.bind(this)
|
||||
], callback);
|
||||
this.closeTheStream(
|
||||
renameTheFiles.bind(null,
|
||||
this.openTheStream.bind(this,
|
||||
callback)));
|
||||
|
||||
};
|
||||
|
||||
@ -1,2 +1,3 @@
|
||||
"use strict";
|
||||
exports.RollingFileStream = require('./RollingFileStream');
|
||||
exports.DateRollingFileStream = require('./DateRollingFileStream');
|
||||
|
||||
10
package.json
10
package.json
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "log4js",
|
||||
"version": "0.6.21",
|
||||
"version": "0.6.38",
|
||||
"description": "Port of Log4js to work with node.",
|
||||
"keywords": [
|
||||
"logging",
|
||||
@ -22,6 +22,7 @@
|
||||
"node": ">=0.8"
|
||||
},
|
||||
"scripts": {
|
||||
"pretest": "jshint lib/ test/",
|
||||
"test": "vows"
|
||||
},
|
||||
"directories": {
|
||||
@ -29,14 +30,13 @@
|
||||
"lib": "lib"
|
||||
},
|
||||
"dependencies": {
|
||||
"async": "~0.2.0",
|
||||
"readable-stream": "~1.0.2",
|
||||
"semver": "~1.1.4"
|
||||
"semver": "~4.3.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"vows": "0.7.0",
|
||||
"jshint": "^2.9.2",
|
||||
"sandboxed-module": "0.1.3",
|
||||
"underscore": "1.2.1"
|
||||
"vows": "0.7.0"
|
||||
},
|
||||
"browser": {
|
||||
"os": false
|
||||
|
||||
@ -12,80 +12,112 @@ vows.describe('log4js cluster appender').addBatch({
|
||||
|
||||
var registeredClusterEvents = [];
|
||||
var loggingEvents = [];
|
||||
|
||||
// Fake cluster module, so no cluster listeners be really added
|
||||
var onChildProcessForked;
|
||||
var onMasterReceiveChildMessage;
|
||||
|
||||
// Fake cluster module, so no real cluster listeners be really added
|
||||
var fakeCluster = {
|
||||
|
||||
|
||||
on: function(event, callback) {
|
||||
registeredClusterEvents.push(event);
|
||||
onChildProcessForked = callback;
|
||||
},
|
||||
|
||||
|
||||
isMaster: true,
|
||||
isWorker: false,
|
||||
|
||||
|
||||
};
|
||||
|
||||
var fakeWorker = {
|
||||
on: function(event, callback) {
|
||||
onMasterReceiveChildMessage = callback;
|
||||
},
|
||||
process: {
|
||||
pid: 123
|
||||
},
|
||||
id: 'workerid'
|
||||
};
|
||||
|
||||
var fakeActualAppender = function(loggingEvent) {
|
||||
loggingEvents.push(loggingEvent);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
// Load appender and fake modules in it
|
||||
var appenderModule = sandbox.require('../lib/appenders/clustered', {
|
||||
requires: {
|
||||
'cluster': fakeCluster,
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
var masterAppender = appenderModule.appender({
|
||||
actualAppenders: [fakeActualAppender, fakeActualAppender, fakeActualAppender],
|
||||
appenders: [{}, {category: "test"}, {category: "wovs"}]
|
||||
appenders: [{}, {category: "test"}, {category: "wovs"}]
|
||||
});
|
||||
|
||||
// Actual test - log message using masterAppender
|
||||
masterAppender(new LoggingEvent('wovs', 'Info', ['masterAppender test']));
|
||||
|
||||
|
||||
// Simulate a 'fork' event to register the master's message handler on our fake worker.
|
||||
onChildProcessForked(fakeWorker);
|
||||
// Simulate a cluster message received by the masterAppender.
|
||||
var simulatedLoggingEvent = new LoggingEvent(
|
||||
'wovs',
|
||||
'Error',
|
||||
[
|
||||
'message deserialization test',
|
||||
{stack: 'my wrapped stack'}
|
||||
]
|
||||
);
|
||||
onMasterReceiveChildMessage({
|
||||
type : '::log-message',
|
||||
event : JSON.stringify(simulatedLoggingEvent)
|
||||
});
|
||||
|
||||
var returnValue = {
|
||||
registeredClusterEvents: registeredClusterEvents,
|
||||
loggingEvents: loggingEvents,
|
||||
};
|
||||
|
||||
|
||||
return returnValue;
|
||||
},
|
||||
|
||||
"should register 'fork' event listener on 'cluster'": function(topic) {
|
||||
},
|
||||
|
||||
"should register 'fork' event listener on 'cluster'": function(topic) {
|
||||
assert.equal(topic.registeredClusterEvents[0], 'fork');
|
||||
},
|
||||
|
||||
|
||||
"should log using actual appender": function(topic) {
|
||||
assert.equal(topic.loggingEvents.length, 2)
|
||||
assert.equal(topic.loggingEvents.length, 4);
|
||||
assert.equal(topic.loggingEvents[0].data[0], 'masterAppender test');
|
||||
assert.equal(topic.loggingEvents[1].data[0], 'masterAppender test');
|
||||
assert.equal(topic.loggingEvents[1].data[0], 'masterAppender test');
|
||||
assert.equal(topic.loggingEvents[2].data[0], 'message deserialization test');
|
||||
assert.equal(topic.loggingEvents[2].data[1], 'my wrapped stack');
|
||||
assert.equal(topic.loggingEvents[3].data[0], 'message deserialization test');
|
||||
assert.equal(topic.loggingEvents[3].data[1], 'my wrapped stack');
|
||||
},
|
||||
|
||||
|
||||
},
|
||||
|
||||
|
||||
'when in worker mode': {
|
||||
|
||||
|
||||
topic: function() {
|
||||
|
||||
|
||||
var registeredProcessEvents = [];
|
||||
|
||||
|
||||
// Fake cluster module, to fake we're inside a worker process
|
||||
var fakeCluster = {
|
||||
|
||||
|
||||
isMaster: false,
|
||||
isWorker: true,
|
||||
|
||||
|
||||
};
|
||||
|
||||
|
||||
var fakeProcess = {
|
||||
|
||||
|
||||
send: function(data) {
|
||||
registeredProcessEvents.push(data);
|
||||
},
|
||||
|
||||
|
||||
};
|
||||
|
||||
|
||||
// Load appender and fake modules in it
|
||||
var appenderModule = sandbox.require('../lib/appenders/clustered', {
|
||||
requires: {
|
||||
@ -95,34 +127,40 @@ vows.describe('log4js cluster appender').addBatch({
|
||||
'process': fakeProcess,
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
var workerAppender = appenderModule.appender();
|
||||
|
||||
// Actual test - log message using masterAppender
|
||||
workerAppender(new LoggingEvent('wovs', 'Info', ['workerAppender test']));
|
||||
workerAppender(new LoggingEvent('wovs', 'Info', [new Error('Error test')]));
|
||||
|
||||
|
||||
var returnValue = {
|
||||
registeredProcessEvents: registeredProcessEvents,
|
||||
};
|
||||
|
||||
|
||||
return returnValue;
|
||||
|
||||
|
||||
},
|
||||
|
||||
|
||||
"worker appender should call process.send" : function(topic) {
|
||||
assert.equal(topic.registeredProcessEvents[0].type, '::log-message');
|
||||
assert.equal(JSON.parse(topic.registeredProcessEvents[0].event).data[0], "workerAppender test");
|
||||
assert.equal(
|
||||
JSON.parse(topic.registeredProcessEvents[0].event).data[0],
|
||||
"workerAppender test"
|
||||
);
|
||||
},
|
||||
|
||||
|
||||
"worker should serialize an Error correctly" : function(topic) {
|
||||
assert.equal(topic.registeredProcessEvents[1].type, '::log-message');
|
||||
assert(JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack);
|
||||
var actual = JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack;
|
||||
var expectedRegex = /^Error: Error test/;
|
||||
assert(actual.match(expectedRegex), "Expected: \n\n " + actual + "\n\n to match " + expectedRegex);
|
||||
assert(
|
||||
actual.match(expectedRegex),
|
||||
"Expected: \n\n " + actual + "\n\n to match " + expectedRegex
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
}).exportTo(module);
|
||||
|
||||
@ -2,13 +2,15 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, util = require('util')
|
||||
, EE = require('events').EventEmitter
|
||||
, levels = require('../lib/levels');
|
||||
|
||||
function MockLogger() {
|
||||
|
||||
var that = this;
|
||||
this.messages = [];
|
||||
|
||||
|
||||
this.log = function(level, message, exception) {
|
||||
that.messages.push({ level: level, message: message });
|
||||
};
|
||||
@ -16,7 +18,7 @@ function MockLogger() {
|
||||
this.isLevelEnabled = function(level) {
|
||||
return level.isGreaterThanOrEqualTo(that.level);
|
||||
};
|
||||
|
||||
|
||||
this.level = levels.TRACE;
|
||||
|
||||
}
|
||||
@ -37,15 +39,19 @@ function MockRequest(remoteAddr, method, originalUrl, headers) {
|
||||
}
|
||||
|
||||
function MockResponse() {
|
||||
|
||||
this.end = function(chunk, encoding) {
|
||||
var r = this;
|
||||
this.end = function(chunk, encoding) {
|
||||
r.emit('finish');
|
||||
};
|
||||
|
||||
this.writeHead = function(code, headers) {
|
||||
this.statusCode = code;
|
||||
this._headers = headers;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
util.inherits(MockResponse, EE);
|
||||
|
||||
function request(cl, method, url, code, reqHeaders, resHeaders) {
|
||||
var req = new MockRequest('my.remote.addr', method, url, reqHeaders);
|
||||
var res = new MockResponse();
|
||||
@ -60,7 +66,7 @@ vows.describe('log4js connect logger').addBatch({
|
||||
var clm = require('../lib/connect-logger');
|
||||
return clm;
|
||||
},
|
||||
|
||||
|
||||
'should return a "connect logger" factory' : function(clm) {
|
||||
assert.isObject(clm);
|
||||
},
|
||||
@ -71,18 +77,21 @@ vows.describe('log4js connect logger').addBatch({
|
||||
var cl = clm.connectLogger(ml);
|
||||
return cl;
|
||||
},
|
||||
|
||||
|
||||
'should return a "connect logger"': function(cl) {
|
||||
assert.isFunction(cl);
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
'log events' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml);
|
||||
var cb = this.callback;
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
return ml.messages;
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10);
|
||||
},
|
||||
|
||||
'check message': function(messages) {
|
||||
@ -104,7 +113,7 @@ vows.describe('log4js connect logger').addBatch({
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
return ml.messages;
|
||||
},
|
||||
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.isEmpty(messages);
|
||||
@ -114,12 +123,14 @@ vows.describe('log4js connect logger').addBatch({
|
||||
'log events with non-default level and custom format' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url' } );
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
return ml.messages;
|
||||
},
|
||||
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10); },
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
@ -131,10 +142,13 @@ vows.describe('log4js connect logger').addBatch({
|
||||
'logger with options as string': {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, ':method :url');
|
||||
request(cl, 'POST', 'http://meh', 200);
|
||||
return ml.messages;
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10);
|
||||
},
|
||||
'should use the passed in format': function(messages) {
|
||||
assert.equal(messages[0].message, 'POST http://meh');
|
||||
@ -144,6 +158,7 @@ vows.describe('log4js connect logger').addBatch({
|
||||
'auto log levels': {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, { level: 'auto', format: ':method :url' });
|
||||
request(cl, 'GET', 'http://meh', 200);
|
||||
@ -151,7 +166,9 @@ vows.describe('log4js connect logger').addBatch({
|
||||
request(cl, 'GET', 'http://meh', 302);
|
||||
request(cl, 'GET', 'http://meh', 404);
|
||||
request(cl, 'GET', 'http://meh', 500);
|
||||
return ml.messages;
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10);
|
||||
},
|
||||
|
||||
'should use INFO for 2xx': function(messages) {
|
||||
@ -175,10 +192,13 @@ vows.describe('log4js connect logger').addBatch({
'format using a function': {
topic: function(clm) {
var ml = new MockLogger();
var cb = this.callback;
ml.level = levels.INFO;
var cl = clm.connectLogger(ml, function(req, res, formatFn) { return "I was called"; });
request(cl, 'GET', 'http://blah', 200);
return ml.messages;
setTimeout(function() {
cb(null, ml.messages);
},10);
},

'should call the format function': function(messages) {
@ -189,14 +209,17 @@ vows.describe('log4js connect logger').addBatch({
'format that includes request headers': {
topic: function(clm) {
var ml = new MockLogger();
var cb = this.callback;
ml.level = levels.INFO;
var cl = clm.connectLogger(ml, ':req[Content-Type]');
request(
cl,
'GET', 'http://blah', 200,
cl,
'GET', 'http://blah', 200,
{ 'Content-Type': 'application/json' }
);
return ml.messages;
setTimeout(function() {
cb(null, ml.messages);
},10);
},
'should output the request header': function(messages) {
assert.equal(messages[0].message, 'application/json');
@ -206,6 +229,7 @@ vows.describe('log4js connect logger').addBatch({
'format that includes response headers': {
topic: function(clm) {
var ml = new MockLogger();
var cb = this.callback;
ml.level = levels.INFO;
var cl = clm.connectLogger(ml, ':res[Content-Type]');
request(
@ -214,13 +238,66 @@ vows.describe('log4js connect logger').addBatch({
null,
{ 'Content-Type': 'application/cheese' }
);
return ml.messages;
setTimeout(function() {
cb(null, ml.messages);
},10);
},

'should output the response header': function(messages) {
assert.equal(messages[0].message, 'application/cheese');
}
}

},

'log events with custom token' : {
topic: function(clm) {
var ml = new MockLogger();
var cb = this.callback;
ml.level = levels.INFO;
var cl = clm.connectLogger(ml, {
level: levels.INFO,
format: ':method :url :custom_string',
tokens: [{
token: ':custom_string', replacement: 'fooBAR'
}]
});
request(cl, 'GET', 'http://url', 200);
setTimeout(function() {
cb(null, ml.messages);
},10);
},

'check message': function(messages) {
assert.isArray(messages);
assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.equal(messages[0].message, 'GET http://url fooBAR');
}
},

'log events with custom override token' : {
topic: function(clm) {
var ml = new MockLogger();
var cb = this.callback;
ml.level = levels.INFO;
var cl = clm.connectLogger(ml, {
level: levels.INFO,
format: ':method :url :date',
tokens: [{
token: ':date', replacement: "20150310"
}]
});
request(cl, 'GET', 'http://url', 200);
setTimeout(function() {
cb(null, ml.messages);
},10);
},

'check message': function(messages) {
assert.isArray(messages);
assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.equal(messages[0].message, 'GET http://url 20150310');
}
}
}
}).export(module);

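The hunks above add custom-token support to the connect logger. A minimal usage sketch, assuming an Express/connect app object named `app` (the ':custom_string' token and its replacement mirror the test above; the rest is the connectLogger API the test exercises):

var log4js = require('log4js');
var logger = log4js.getLogger('http');

app.use(log4js.connectLogger(logger, {
  level: log4js.levels.INFO,
  format: ':method :url :custom_string',
  tokens: [
    // each token is looked up in the format string and replaced before logging
    { token: ':custom_string', replacement: 'fooBAR' }
  ]
}));
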
@ -3,11 +3,13 @@ var vows = require('vows')
, assert = require('assert')
, dateFormat = require('../lib/date_format');

function createFixedDate() {
return new Date(2010, 0, 11, 14, 31, 30, 5);
}

vows.describe('date_format').addBatch({
'Date extensions': {
topic: function() {
return new Date(2010, 0, 11, 14, 31, 30, 5);
},
topic: createFixedDate,
'should format a date as string using a pattern': function(date) {
assert.equal(
dateFormat.asString(dateFormat.DATETIME_FORMAT, date),
@ -20,13 +22,16 @@ vows.describe('date_format').addBatch({
'2010-01-11 14:31:30.005'
);
},
'should provide a ISO8601 with timezone offset format': function(date) {
'should provide a ISO8601 with timezone offset format': function() {
var date = createFixedDate();
date.setMinutes(date.getMinutes() - date.getTimezoneOffset() - 660);
date.getTimezoneOffset = function() { return -660; };
assert.equal(
dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
"2010-01-11T14:31:30+1100"
);

date = createFixedDate();
date.setMinutes(date.getMinutes() - date.getTimezoneOffset() + 120);
date.getTimezoneOffset = function() { return 120; };
assert.equal(
dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
@ -40,7 +45,9 @@ vows.describe('date_format').addBatch({
'14:31:30.005'
);
},
'should provide a custom format': function(date) {
'should provide a custom format': function() {
var date = createFixedDate();
date.setMinutes(date.getMinutes() - date.getTimezoneOffset() + 120);
date.getTimezoneOffset = function() { return 120; };
assert.equal(
dateFormat.asString("O.SSS.ss.mm.hh.dd.MM.yy", date),

@ -5,6 +5,7 @@ var vows = require('vows')
|
||||
, sandbox = require('sandboxed-module')
|
||||
, log4js = require('../lib/log4js')
|
||||
, assert = require('assert')
|
||||
, zlib = require('zlib')
|
||||
, EOL = require('os').EOL || '\n';
|
||||
|
||||
log4js.clearAppenders();
|
||||
@ -23,15 +24,15 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
var listenersCount = process.listeners('exit').length
|
||||
, logger = log4js.getLogger('default-settings')
|
||||
, count = 5, logfile;
|
||||
|
||||
|
||||
while (count--) {
|
||||
logfile = path.join(__dirname, '/fa-default-test' + count + '.log');
|
||||
log4js.addAppender(require('../lib/appenders/file').appender(logfile), 'default-settings');
|
||||
}
|
||||
|
||||
|
||||
return listenersCount;
|
||||
},
|
||||
|
||||
|
||||
'does not add more than one `exit` listeners': function (initialCount) {
|
||||
assert.ok(process.listeners('exit').length <= initialCount + 1);
|
||||
}
|
||||
@ -55,7 +56,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
'../streams': {
|
||||
RollingFileStream: function(filename) {
|
||||
openedFiles.push(filename);
|
||||
|
||||
|
||||
this.end = function() {
|
||||
openedFiles.shift();
|
||||
};
|
||||
@ -63,7 +64,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
this.on = function() {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
for (var i=0; i < 5; i += 1) {
|
||||
@ -77,7 +78,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
assert.isEmpty(openedFiles);
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
'with default fileAppender settings': {
|
||||
topic: function() {
|
||||
var that = this
|
||||
@ -87,9 +88,9 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings');
|
||||
|
||||
|
||||
logger.info("This should be in the file.");
|
||||
|
||||
|
||||
setTimeout(function() {
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
}, 100);
|
||||
@ -99,11 +100,94 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
},
|
||||
'log messages should be in the basic layout format': function(err, fileContents) {
|
||||
assert.match(
|
||||
fileContents,
|
||||
fileContents,
|
||||
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
|
||||
);
|
||||
}
|
||||
},
|
||||
'fileAppender subcategories': {
|
||||
topic: function() {
|
||||
var that = this;
|
||||
|
||||
log4js.clearAppenders();
|
||||
|
||||
function addAppender(cat) {
|
||||
var testFile = path.join(
|
||||
__dirname,
|
||||
'/fa-subcategories-test-'+cat.join('-').replace(/\./g, "_")+'.log'
|
||||
);
|
||||
remove(testFile);
|
||||
log4js.addAppender(require('../lib/appenders/file').appender(testFile), cat);
|
||||
return testFile;
|
||||
}
|
||||
|
||||
var file_sub1 = addAppender([ 'sub1']);
|
||||
|
||||
var file_sub1_sub12$sub1_sub13 = addAppender([ 'sub1.sub12', 'sub1.sub13' ]);
|
||||
|
||||
var file_sub1_sub12 = addAppender([ 'sub1.sub12' ]);
|
||||
|
||||
|
||||
var logger_sub1_sub12_sub123 = log4js.getLogger('sub1.sub12.sub123');
|
||||
|
||||
var logger_sub1_sub13_sub133 = log4js.getLogger('sub1.sub13.sub133');
|
||||
|
||||
var logger_sub1_sub14 = log4js.getLogger('sub1.sub14');
|
||||
|
||||
var logger_sub2 = log4js.getLogger('sub2');
|
||||
|
||||
|
||||
logger_sub1_sub12_sub123.info('sub1_sub12_sub123');
|
||||
|
||||
logger_sub1_sub13_sub133.info('sub1_sub13_sub133');
|
||||
|
||||
logger_sub1_sub14.info('sub1_sub14');
|
||||
|
||||
logger_sub2.info('sub2');
|
||||
|
||||
|
||||
setTimeout(function() {
|
||||
that.callback(null, {
|
||||
file_sub1: fs.readFileSync(file_sub1).toString(),
|
||||
file_sub1_sub12$sub1_sub13: fs.readFileSync(file_sub1_sub12$sub1_sub13).toString(),
|
||||
file_sub1_sub12: fs.readFileSync(file_sub1_sub12).toString()
|
||||
});
|
||||
}, 3000);
|
||||
},
|
||||
'check file contents': function (err, fileContents) {
|
||||
|
||||
// everything but category 'sub2'
|
||||
assert.match(
|
||||
fileContents.file_sub1,
|
||||
/^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133|sub1.sub14 - sub1_sub14)[\s\S]){3}$/ // jshint ignore:line
|
||||
);
|
||||
assert.ok(
|
||||
fileContents.file_sub1.match(/sub123/) &&
|
||||
fileContents.file_sub1.match(/sub133/) &&
|
||||
fileContents.file_sub1.match(/sub14/)
|
||||
);
|
||||
assert.ok(!fileContents.file_sub1.match(/sub2/));
|
||||
|
||||
// only categories starting with 'sub1.sub12' and 'sub1.sub13'
|
||||
assert.match(
|
||||
fileContents.file_sub1_sub12$sub1_sub13,
|
||||
/^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133)[\s\S]){2}$/ //jshint ignore:line
|
||||
);
|
||||
assert.ok(
|
||||
fileContents.file_sub1_sub12$sub1_sub13.match(/sub123/) &&
|
||||
fileContents.file_sub1_sub12$sub1_sub13.match(/sub133/)
|
||||
);
|
||||
assert.ok(!fileContents.file_sub1_sub12$sub1_sub13.match(/sub14|sub2/));
|
||||
|
||||
// only categories starting with 'sub1.sub12'
|
||||
assert.match(
|
||||
fileContents.file_sub1_sub12,
|
||||
/^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123)[\s\S]){1}$/ //jshint ignore:line
|
||||
);
|
||||
assert.ok(!fileContents.file_sub1_sub12.match(/sub14|sub2|sub13/));
|
||||
|
||||
}
|
||||
},
|
||||
'with a max file size and no backups': {
|
||||
topic: function() {
|
||||
var testFile = path.join(__dirname, '/fa-maxFileSize-test.log')
|
||||
@ -114,7 +198,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
//log file of 100 bytes maximum, no backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
|
||||
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
|
||||
'max-file-size'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
@ -149,11 +233,11 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
remove(testFile);
|
||||
remove(testFile+'.1');
|
||||
remove(testFile+'.2');
|
||||
|
||||
|
||||
//log file of 50 bytes maximum, 2 backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
|
||||
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
|
||||
'max-file-size-backups'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
@ -163,11 +247,11 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
var that = this;
|
||||
//give the system a chance to open the stream
|
||||
setTimeout(function() {
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
}
|
||||
});
|
||||
}, 200);
|
||||
@ -184,8 +268,8 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
},
|
||||
'should be named in sequence': function (files) {
|
||||
assert.deepEqual(files, [
|
||||
'fa-maxFileSize-with-backups-test.log',
|
||||
'fa-maxFileSize-with-backups-test.log.1',
|
||||
'fa-maxFileSize-with-backups-test.log',
|
||||
'fa-maxFileSize-with-backups-test.log.1',
|
||||
'fa-maxFileSize-with-backups-test.log.2'
|
||||
]);
|
||||
},
|
||||
@ -214,6 +298,83 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'with a max file size and 2 compressed backups': {
|
||||
topic: function() {
|
||||
var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-compressed-test.log')
|
||||
, logger = log4js.getLogger('max-file-size-backups');
|
||||
remove(testFile);
|
||||
remove(testFile+'.1.gz');
|
||||
remove(testFile+'.2.gz');
|
||||
|
||||
//log file of 50 bytes maximum, 2 backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/file').appender(
|
||||
testFile, log4js.layouts.basicLayout, 50, 2, true
|
||||
),
|
||||
'max-file-size-backups'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is the second log message.");
|
||||
logger.info("This is the third log message.");
|
||||
logger.info("This is the fourth log message.");
|
||||
var that = this;
|
||||
//give the system a chance to open the stream
|
||||
setTimeout(function() {
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
}
|
||||
});
|
||||
}, 1000);
|
||||
},
|
||||
'the log files': {
|
||||
topic: function(files) {
|
||||
var logFiles = files.filter(
|
||||
function(file) {
|
||||
return file.indexOf('fa-maxFileSize-with-backups-compressed-test.log') > -1;
|
||||
}
|
||||
);
|
||||
return logFiles;
|
||||
},
|
||||
'should be 3': function (files) {
|
||||
assert.equal(files.length, 3);
|
||||
},
|
||||
'should be named in sequence': function (files) {
|
||||
assert.deepEqual(files, [
|
||||
'fa-maxFileSize-with-backups-compressed-test.log',
|
||||
'fa-maxFileSize-with-backups-compressed-test.log.1.gz',
|
||||
'fa-maxFileSize-with-backups-compressed-test.log.2.gz'
|
||||
]);
|
||||
},
|
||||
'and the contents of the first file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
|
||||
},
|
||||
'should be the last log message': function(contents) {
|
||||
assert.include(contents, 'This is the fourth log message.');
|
||||
}
|
||||
},
|
||||
'and the contents of the second file': {
|
||||
topic: function(logFiles) {
|
||||
zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[1])), this.callback);
|
||||
},
|
||||
'should be the third log message': function(contents) {
|
||||
assert.include(contents.toString('utf8'), 'This is the third log message.');
|
||||
}
|
||||
},
|
||||
'and the contents of the third file': {
|
||||
topic: function(logFiles) {
|
||||
zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[2])), this.callback);
|
||||
},
|
||||
'should be the second log message': function(contents) {
|
||||
assert.include(contents.toString('utf8'), 'This is the second log message.');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}).addBatch({
|
||||
'configure' : {
|
||||
@ -227,7 +388,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
logger = log4js.getLogger('tests');
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
|
||||
|
||||
fs.readFile('tmp-tests.log', 'utf8', this.callback);
|
||||
},
|
||||
'should load appender configuration from a json file': function (err, contents) {
|
||||
@ -254,7 +415,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
requires: {
|
||||
'../streams': {
|
||||
RollingFileStream: function(filename) {
|
||||
|
||||
|
||||
this.end = function() {};
|
||||
this.on = function(evt, cb) {
|
||||
if (evt === 'error') {
|
||||
@ -263,7 +424,7 @@ vows.describe('log4js fileAppender').addBatch({
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
fileAppender.appender('test1.log', null, 100);
|
||||
|
||||
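The fileAppender hunks above add gzip compression of rolled backups (the extra `true` argument to `appender()`). A minimal programmatic sketch mirroring the call the test makes; the require path is an assumption about consuming the published package rather than the repo checkout:

var log4js = require('log4js');
var fileAppender = require('log4js/lib/appenders/file');

// 50-byte maximum file size, 2 backups, compress the rolled files (.1.gz, .2.gz)
log4js.addAppender(
  fileAppender.appender('my-app.log', log4js.layouts.basicLayout, 50, 2, true),
  'my-category'
);
log4js.getLogger('my-category').info('This message may end up in a compressed backup.');
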
@ -26,10 +26,13 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
remove(testFile);
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(require('../lib/appenders/fileSync').appender(testFile), 'default-settings');
|
||||
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/fileSync').appender(testFile),
|
||||
'default-settings'
|
||||
);
|
||||
|
||||
logger.info("This should be in the file.");
|
||||
|
||||
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
},
|
||||
'should write log messages to the file': function (err, fileContents) {
|
||||
@ -37,7 +40,7 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
},
|
||||
'log messages should be in the basic layout format': function(err, fileContents) {
|
||||
assert.match(
|
||||
fileContents,
|
||||
fileContents,
|
||||
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
|
||||
);
|
||||
}
|
||||
@ -52,7 +55,7 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
//log file of 100 bytes maximum, no backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 100, 0),
|
||||
require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 100, 0),
|
||||
'max-file-size'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
@ -85,11 +88,11 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
remove(testFile);
|
||||
remove(testFile+'.1');
|
||||
remove(testFile+'.2');
|
||||
|
||||
|
||||
//log file of 50 bytes maximum, 2 backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 50, 2),
|
||||
require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 50, 2),
|
||||
'max-file-size-backups'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
@ -97,12 +100,12 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
logger.info("This is the third log message.");
|
||||
logger.info("This is the fourth log message.");
|
||||
var that = this;
|
||||
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
}
|
||||
});
|
||||
},
|
||||
@ -118,8 +121,8 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
},
|
||||
'should be named in sequence': function (files) {
|
||||
assert.deepEqual(files, [
|
||||
'fa-maxFileSize-with-backups-sync-test.log',
|
||||
'fa-maxFileSize-with-backups-sync-test.log.1',
|
||||
'fa-maxFileSize-with-backups-sync-test.log',
|
||||
'fa-maxFileSize-with-backups-sync-test.log.1',
|
||||
'fa-maxFileSize-with-backups-sync-test.log.2'
|
||||
]);
|
||||
},
|
||||
@ -158,19 +161,19 @@ vows.describe('log4js fileSyncAppender').addBatch({
|
||||
//this config defines one file appender (to ./tmp-sync-tests.log)
|
||||
//and sets the log level for "tests" to WARN
|
||||
log4js.configure({
|
||||
appenders: [{
|
||||
category: "tests",
|
||||
type: "file",
|
||||
filename: "tmp-sync-tests.log",
|
||||
layout: { type: "messagePassThrough" }
|
||||
appenders: [{
|
||||
category: "tests",
|
||||
type: "file",
|
||||
filename: "tmp-sync-tests.log",
|
||||
layout: { type: "messagePassThrough" }
|
||||
}],
|
||||
|
||||
|
||||
levels: { tests: "WARN" }
|
||||
});
|
||||
logger = log4js.getLogger('tests');
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
|
||||
|
||||
fs.readFile('tmp-sync-tests.log', 'utf8', this.callback);
|
||||
},
|
||||
'should load appender configuration from a json file': function(err, contents) {
|
||||
|
||||
@ -244,6 +244,7 @@ vows.describe('log4js gelfAppender').addBatch({
},
'should pick up the options': function(message) {
assert.equal(message.host, 'cheese');
assert.isUndefined(message.GELF); // make sure flag was removed
assert.equal(message._facility, 'nonsense');
assert.equal(message._every1, 'Hello every one'); // the default value
assert.equal(message._every2, 'Overwritten!'); // the overwritten value

112
test/hipchatAppender-test.js
Normal file
112
test/hipchatAppender-test.js
Normal file
@ -0,0 +1,112 @@
"use strict";
var vows = require('vows'),
assert = require('assert'),
log4js = require('../lib/log4js'),
sandbox = require('sandboxed-module');

function setupLogging(category, options) {
var lastRequest = {};

var fakeRequest = function(args, level){
lastRequest.notifier = this;
lastRequest.body = args[0];
lastRequest.callback = args[1];
lastRequest.level = level;
};

var fakeHipchatNotifier = {
'make': function(room, token, from, host, notify){
return {
'room': room,
'token': token,
'from': from || '',
'host': host || 'api.hipchat.com',
'notify': notify || false,
'setRoom': function(val){ this.room = val; },
'setFrom': function(val){ this.from = val; },
'setHost': function(val){ this.host = val; },
'setNotify': function(val){ this.notify = val; },
'info': function(){ fakeRequest.call(this, arguments, 'info'); },
'warning': function(){ fakeRequest.call(this, arguments, 'warning'); },
'failure': function(){ fakeRequest.call(this, arguments, 'failure'); },
'success': function(){ fakeRequest.call(this, arguments, 'success'); }
};
}
};

var hipchatModule = sandbox.require('../lib/appenders/hipchat', {
requires: {
'hipchat-notifier': fakeHipchatNotifier
}
});
log4js.clearAppenders();
log4js.addAppender(hipchatModule.configure(options), category);

return {
logger: log4js.getLogger(category),
lastRequest: lastRequest
};
}

vows.describe('HipChat appender').addBatch({
'when logging to HipChat v2 API': {
topic: function() {
var customCallback = function(err, res, body){ return 'works'; };

var setup = setupLogging('myCategory', {
"type": "hipchat",
"hipchat_token": "User_Token_With_Notification_Privs",
"hipchat_room": "Room_ID_Or_Name",
"hipchat_from": "Log4js_Test",
"hipchat_notify": true,
"hipchat_host": "hipchat.your-company.tld",
"hipchat_response_callback": customCallback
});
setup.logger.warn('Log event #1');
return setup;
},
'a request to hipchat_host should be sent': function (topic) {
assert.equal(topic.lastRequest.notifier.host, "hipchat.your-company.tld");
assert.equal(topic.lastRequest.notifier.notify, true);
assert.equal(topic.lastRequest.body, 'Log event #1');
assert.equal(topic.lastRequest.level, 'warning');
},
'a custom callback to the HipChat response is supported': function(topic) {
assert.equal(topic.lastRequest.callback(), 'works');
}
},
'when missing options': {
topic: function() {
var setup = setupLogging('myLogger', {
"type": "hipchat",
});
setup.logger.error('Log event #2');
return setup;
},
'it sets some defaults': function (topic) {
assert.equal(topic.lastRequest.notifier.host, "api.hipchat.com");
assert.equal(topic.lastRequest.notifier.notify, false);
assert.equal(topic.lastRequest.body, 'Log event #2');
assert.equal(topic.lastRequest.level, 'failure');
}
},
'when basicLayout is provided': {
topic: function() {
var setup = setupLogging('myLogger', {
"type": "hipchat",
"layout": log4js.layouts.basicLayout
});
setup.logger.debug('Log event #3');
return setup;
},
'it should include the timestamp': function (topic) {

// basicLayout adds [TIMESTAMP] [LEVEL] category - message
// e.g. [2016-06-10 11:50:53.819] [DEBUG] myLogger - Log event #23

assert.match(topic.lastRequest.body, /^\[[^\]]+\] \[[^\]]+\].*Log event \#3$/);
assert.equal(topic.lastRequest.level, 'info');
}
}

}).export(module);
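The new test above pins down the option names the HipChat appender expects. A configuration sketch using those same options (values and the category are placeholders; only the option names are taken from the test):

var log4js = require('log4js');

log4js.configure({
  appenders: [{
    type: 'hipchat',
    hipchat_token: 'User_Token_With_Notification_Privs',
    hipchat_room: 'Room_ID_Or_Name',
    hipchat_from: 'My_Service',               // optional sender name
    hipchat_notify: true,                     // trigger a HipChat notification
    hipchat_host: 'hipchat.your-company.tld',
    category: 'myCategory'
  }]
});

log4js.getLogger('myCategory').warn('Something worth a HipChat message');
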
@ -2,6 +2,7 @@
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, os = require('os')
|
||||
, semver = require('semver')
|
||||
, EOL = os.EOL || '\n';
|
||||
|
||||
//used for patternLayout tests.
|
||||
@ -108,7 +109,7 @@ vows.describe('log4js layouts').addBatch({
|
||||
});
|
||||
},
|
||||
'should print error the contained error message': function(layoutOutput) {
|
||||
var m = layoutOutput.match(/\{ \[Error: My Unique Error Message\]/);
|
||||
var m = layoutOutput.match(/Error: My Unique Error Message/);
|
||||
assert.isArray(m);
|
||||
},
|
||||
'should print error augmented string attributes': function(layoutOutput) {
|
||||
@ -142,7 +143,7 @@ vows.describe('log4js layouts').addBatch({
|
||||
assert.equal(layout(event), "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test");
|
||||
},
|
||||
'should output a stacktrace, message if the event has an error attached': function(args) {
|
||||
var layout = args[0], event = args[1], output, lines,
|
||||
var i, layout = args[0], event = args[1], output, lines,
|
||||
error = new Error("Some made-up error"),
|
||||
stack = error.stack.split(/\n/);
|
||||
|
||||
@ -150,15 +151,26 @@ vows.describe('log4js layouts').addBatch({
|
||||
output = layout(event);
|
||||
lines = output.split(/\n/);
|
||||
|
||||
assert.equal(lines.length - 1, stack.length);
|
||||
assert.equal(
|
||||
lines[0],
|
||||
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]"
|
||||
);
|
||||
|
||||
for (var i = 1; i < stack.length; i++) {
|
||||
assert.equal(lines[i+2], stack[i+1]);
|
||||
if (semver.satisfies(process.version, '>=6')) {
|
||||
assert.equal(lines.length, stack.length);
|
||||
assert.equal(
|
||||
lines[0],
|
||||
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error"
|
||||
);
|
||||
for (i = 1; i < stack.length; i++) {
|
||||
assert.equal(lines[i], stack[i]);
|
||||
}
|
||||
} else {
|
||||
assert.equal(lines.length - 1, stack.length);
|
||||
assert.equal(
|
||||
lines[0],
|
||||
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]"
|
||||
);
|
||||
for (i = 1; i < stack.length; i++) {
|
||||
assert.equal(lines[i+2], stack[i+1]);
|
||||
}
|
||||
}
|
||||
|
||||
},
|
||||
'should output any extra data in the log event as util.inspect strings': function(args) {
|
||||
var layout = args[0], event = args[1], output, lines;
|
||||
@ -179,7 +191,7 @@ vows.describe('log4js layouts').addBatch({
|
||||
topic: function() {
|
||||
var event = {
|
||||
data: ['this is a test'],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
startTime: new Date('2010-12-05T14:18:30.045Z'), //new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "multiple.levels.of.tests",
|
||||
level: {
|
||||
toString: function() { return "DEBUG"; }
|
||||
@ -282,14 +294,14 @@ vows.describe('log4js layouts').addBatch({
|
||||
test(args, '%x{testFunction}', 'testFunctionToken');
|
||||
},
|
||||
'%x{doesNotExist} should output the string stored in tokens': function(args) {
|
||||
test(args, '%x{doesNotExist}', '%x{doesNotExist}');
|
||||
test(args, '%x{doesNotExist}', 'null');
|
||||
},
|
||||
'%x{fnThatUsesLogEvent} should be able to use the logEvent': function(args) {
|
||||
test(args, '%x{fnThatUsesLogEvent}', 'DEBUG');
|
||||
},
|
||||
'%x should output the string stored in tokens': function(args) {
|
||||
test(args, '%x', '%x');
|
||||
},
|
||||
test(args, '%x', 'null');
|
||||
}
|
||||
},
|
||||
'layout makers': {
|
||||
topic: require('../lib/layouts'),
|
||||
@ -300,5 +312,19 @@ vows.describe('log4js layouts').addBatch({
|
||||
assert.ok(layouts.layout("coloured"));
|
||||
assert.ok(layouts.layout("pattern"));
|
||||
}
|
||||
},
|
||||
'add layout': {
|
||||
topic: require('../lib/layouts'),
|
||||
'should be able to add a layout': function(layouts) {
|
||||
layouts.addLayout('test_layout', function(config){
|
||||
assert.equal(config, 'test_config');
|
||||
return function(logEvent) {
|
||||
return "TEST LAYOUT >"+logEvent.data;
|
||||
};
|
||||
});
|
||||
var serializer = layouts.layout('test_layout', 'test_config');
|
||||
assert.ok(serializer);
|
||||
assert.equal(serializer({data: "INPUT"}), "TEST LAYOUT >INPUT");
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
|
||||
@ -43,20 +43,22 @@ vows.describe('levels').addBatch({
|
||||
assert.isNotNull(levels.WARN);
|
||||
assert.isNotNull(levels.ERROR);
|
||||
assert.isNotNull(levels.FATAL);
|
||||
assert.isNotNull(levels.MARK);
|
||||
assert.isNotNull(levels.OFF);
|
||||
},
|
||||
'ALL': {
|
||||
topic: levels.ALL,
|
||||
'should be less than the other levels': function(all) {
|
||||
assertThat(all).isLessThanOrEqualTo(
|
||||
[
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
[
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
@ -64,12 +66,13 @@ vows.describe('levels').addBatch({
|
||||
'should be greater than no levels': function(all) {
|
||||
assertThat(all).isNotGreaterThanOrEqualTo(
|
||||
[
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
@ -78,12 +81,13 @@ vows.describe('levels').addBatch({
|
||||
assertThat(all).isEqualTo([levels.toLevel("ALL")]);
|
||||
assertThat(all).isNotEqualTo(
|
||||
[
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
@ -94,11 +98,12 @@ vows.describe('levels').addBatch({
|
||||
'should be less than DEBUG': function(trace) {
|
||||
assertThat(trace).isLessThanOrEqualTo(
|
||||
[
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
@ -108,11 +113,12 @@ vows.describe('levels').addBatch({
|
||||
assertThat(trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
assertThat(trace).isNotGreaterThanOrEqualTo(
|
||||
[
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
@ -121,12 +127,13 @@ vows.describe('levels').addBatch({
|
||||
assertThat(trace).isEqualTo([levels.toLevel("TRACE")]);
|
||||
assertThat(trace).isNotEqualTo(
|
||||
[
|
||||
levels.ALL,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.ALL,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
@ -137,10 +144,11 @@ vows.describe('levels').addBatch({
|
||||
'should be less than INFO': function(debug) {
|
||||
assertThat(debug).isLessThanOrEqualTo(
|
||||
[
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
@ -150,10 +158,11 @@ vows.describe('levels').addBatch({
|
||||
assertThat(debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
assertThat(debug).isNotGreaterThanOrEqualTo(
|
||||
[
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
@ -162,12 +171,13 @@ vows.describe('levels').addBatch({
|
||||
assertThat(trace).isEqualTo([levels.toLevel("DEBUG")]);
|
||||
assertThat(trace).isNotEqualTo(
|
||||
[
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
@ -177,9 +187,10 @@ vows.describe('levels').addBatch({
|
||||
topic: levels.INFO,
|
||||
'should be less than WARN': function(info) {
|
||||
assertThat(info).isLessThanOrEqualTo([
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
|
||||
@ -187,21 +198,23 @@ vows.describe('levels').addBatch({
|
||||
'should be greater than DEBUG': function(info) {
|
||||
assertThat(info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
|
||||
assertThat(info).isNotGreaterThanOrEqualTo([
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
},
|
||||
'should only be equal to INFO': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("INFO")]);
|
||||
assertThat(trace).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
@ -209,32 +222,34 @@ vows.describe('levels').addBatch({
|
||||
'WARN': {
|
||||
topic: levels.WARN,
|
||||
'should be less than ERROR': function(warn) {
|
||||
assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.MARK, levels.OFF]);
|
||||
assertThat(warn).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO
|
||||
]);
|
||||
},
|
||||
'should be greater than INFO': function(warn) {
|
||||
assertThat(warn).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO
|
||||
]);
|
||||
assertThat(warn).isNotGreaterThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
assertThat(warn).isNotGreaterThanOrEqualTo([
|
||||
levels.ERROR, levels.FATAL, levels.MARK, levels.OFF
|
||||
]);
|
||||
},
|
||||
'should only be equal to WARN': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("WARN")]);
|
||||
assertThat(trace).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
@ -242,34 +257,35 @@ vows.describe('levels').addBatch({
|
||||
'ERROR': {
|
||||
topic: levels.ERROR,
|
||||
'should be less than FATAL': function(error) {
|
||||
assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.OFF]);
|
||||
assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
|
||||
assertThat(error).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN
|
||||
]);
|
||||
},
|
||||
'should be greater than WARN': function(error) {
|
||||
assertThat(error).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN
|
||||
]);
|
||||
assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.OFF]);
|
||||
assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
|
||||
},
|
||||
'should only be equal to ERROR': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("ERROR")]);
|
||||
assertThat(trace).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.FATAL,
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
@ -277,36 +293,77 @@ vows.describe('levels').addBatch({
|
||||
'FATAL': {
|
||||
topic: levels.FATAL,
|
||||
'should be less than OFF': function(fatal) {
|
||||
assertThat(fatal).isLessThanOrEqualTo([levels.OFF]);
|
||||
assertThat(fatal).isLessThanOrEqualTo([levels.MARK, levels.OFF]);
|
||||
assertThat(fatal).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR
|
||||
]);
|
||||
},
|
||||
'should be greater than ERROR': function(fatal) {
|
||||
assertThat(fatal).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR
|
||||
]);
|
||||
assertThat(fatal).isNotGreaterThanOrEqualTo([levels.OFF]);
|
||||
]);
|
||||
assertThat(fatal).isNotGreaterThanOrEqualTo([levels.MARK, levels.OFF]);
|
||||
},
|
||||
'should only be equal to FATAL': function(fatal) {
|
||||
assertThat(fatal).isEqualTo([levels.toLevel("FATAL")]);
|
||||
assertThat(fatal).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
},
|
||||
'MARK': {
|
||||
topic: levels.MARK,
|
||||
'should be less than OFF': function(mark) {
|
||||
assertThat(mark).isLessThanOrEqualTo([levels.OFF]);
|
||||
assertThat(mark).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.FATAL,
|
||||
levels.ERROR
|
||||
]);
|
||||
},
|
||||
'should be greater than FATAL': function(mark) {
|
||||
assertThat(mark).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL
|
||||
]);
|
||||
assertThat(mark).isNotGreaterThanOrEqualTo([levels.OFF]);
|
||||
},
|
||||
'should only be equal to MARK': function(mark) {
|
||||
assertThat(mark).isEqualTo([levels.toLevel("MARK")]);
|
||||
assertThat(mark).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
@ -315,36 +372,39 @@ vows.describe('levels').addBatch({
|
||||
topic: levels.OFF,
|
||||
'should not be less than anything': function(off) {
|
||||
assertThat(off).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK
|
||||
]);
|
||||
},
|
||||
'should be greater than everything': function(off) {
|
||||
assertThat(off).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK
|
||||
]);
|
||||
},
|
||||
'should only be equal to OFF': function(off) {
|
||||
assertThat(off).isEqualTo([levels.toLevel("OFF")]);
|
||||
assertThat(off).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK
|
||||
]);
|
||||
}
|
||||
}
|
||||
@ -353,14 +413,14 @@ vows.describe('levels').addBatch({
|
||||
topic: levels.INFO,
|
||||
'should handle string arguments': function(info) {
|
||||
assertThat(info).isGreaterThanOrEqualTo(["all", "trace", "debug"]);
|
||||
assertThat(info).isNotGreaterThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']);
|
||||
assertThat(info).isNotGreaterThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'MARK', 'off']);
|
||||
}
|
||||
},
|
||||
'isLessThanOrEqualTo': {
|
||||
topic: levels.INFO,
|
||||
'should handle string arguments': function(info) {
|
||||
assertThat(info).isNotLessThanOrEqualTo(["all", "trace", "debug"]);
|
||||
assertThat(info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']);
|
||||
assertThat(info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'MARK', 'off']);
|
||||
}
|
||||
},
|
||||
'isEqualTo': {
|
||||
|
||||
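The levels hunks above mostly add the new MARK level to the existing ordering checks. The comparison API they exercise accepts either Level objects or level-name strings; a short sketch, with the require path as used by the tests:

var levels = require('../lib/levels');

levels.INFO.isGreaterThanOrEqualTo('debug');   // true
levels.INFO.isLessThanOrEqualTo('warn');       // true
levels.INFO.isEqualTo(levels.toLevel('INFO')); // true
levels.toLevel('MARK');                        // the new level, between FATAL and OFF
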
96
test/logFacesAppender-test.js
Normal file
96
test/logFacesAppender-test.js
Normal file
@ -0,0 +1,96 @@
"use strict";
var vows = require('vows'),
assert = require('assert'),
log4js = require('../lib/log4js'),
sandbox = require('sandboxed-module');

function setupLogging(category, options) {
var udpSent = {};

var fakeDgram = {
createSocket: function (type) {
return {
send: function(buffer, offset, length, port, host, callback) {
udpSent.date = new Date();
udpSent.host = host;
udpSent.port = port;
udpSent.length = length;
udpSent.offset = 0;
udpSent.buffer = buffer;
callback(undefined, length);
}
};
}
};

var lfsModule = sandbox.require('../lib/appenders/logFacesAppender', {
requires: {
'dgram': fakeDgram
}
});
log4js.clearAppenders();
log4js.addAppender(lfsModule.configure(options), category);

return {
logger: log4js.getLogger(category),
results: udpSent
};
}

vows.describe('logFaces UDP appender').addBatch({
'when logging to logFaces UDP receiver': {
topic: function() {
var setup = setupLogging('myCategory', {
"type": "logFacesAppender",
"application": "LFS-TEST",
"remoteHost": "127.0.0.1",
"port": 55201,
"layout": {
"type": "pattern",
"pattern": "%m"
}
});

setup.logger.warn('Log event #1');
return setup;
},
'an UDP packet should be sent': function (topic) {
assert.equal(topic.results.host, "127.0.0.1");
assert.equal(topic.results.port, 55201);
assert.equal(topic.results.offset, 0);
var json = JSON.parse(topic.results.buffer.toString());
assert.equal(json.a, 'LFS-TEST');
assert.equal(json.m, 'Log event #1');
assert.equal(json.g, 'myCategory');
assert.equal(json.p, 'WARN');

// Assert timestamp, up to hours resolution.
var date = new Date(json.t);
assert.equal(
date.toISOString().substring(0, 14),
topic.results.date.toISOString().substring(0, 14)
);
}
},

'when missing options': {
topic: function() {
var setup = setupLogging('myLogger', {
"type": "logFacesAppender",
});
setup.logger.error('Log event #2');
return setup;
},
'it sets some defaults': function (topic) {
assert.equal(topic.results.host, "127.0.0.1");
assert.equal(topic.results.port, 55201);

var json = JSON.parse(topic.results.buffer.toString());
assert.equal(json.a, "");
assert.equal(json.m, 'Log event #2');
assert.equal(json.g, 'myLogger');
assert.equal(json.p, 'ERROR');
}
}

}).export(module);
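The new logFaces test above shows the appender's option names and the JSON fields it sends over UDP. A configuration sketch using those options (the application name and category are placeholders):

var log4js = require('log4js');

log4js.configure({
  appenders: [{
    type: 'logFacesAppender',
    application: 'MY-APP',      // sent as the 'a' field in each UDP packet
    remoteHost: '127.0.0.1',
    port: 55201,
    category: 'myCategory'
  }]
});

log4js.getLogger('myCategory').warn('Log event #1');
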
@ -7,7 +7,7 @@ function setupConsoleTest() {
|
||||
var fakeConsole = {}
|
||||
, logEvents = []
|
||||
, log4js;
|
||||
|
||||
|
||||
['trace','debug','log','info','warn','error'].forEach(function(fn) {
|
||||
fakeConsole[fn] = function() {
|
||||
throw new Error("this should not be called.");
|
||||
@ -15,7 +15,7 @@ function setupConsoleTest() {
|
||||
});
|
||||
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
'../lib/log4js',
|
||||
{
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
@ -64,7 +64,10 @@ vows.describe('log4js').addBatch({
|
||||
logger.trace("Trace event 2");
|
||||
logger.warn("Warning event");
|
||||
logger.error("Aargh!", new Error("Pants are on fire!"));
|
||||
logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" });
|
||||
logger.error(
|
||||
"Simulated CouchDB problem",
|
||||
{ err: 127, cause: "incendiary underwear" }
|
||||
);
|
||||
return events;
|
||||
},
|
||||
|
||||
@ -86,7 +89,10 @@ vows.describe('log4js').addBatch({
|
||||
logger.trace("Trace event 2");
|
||||
logger.warn("Warning event");
|
||||
logger.error("Aargh!", new Error("Pants are on fire!"));
|
||||
logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" });
|
||||
logger.error(
|
||||
"Simulated CouchDB problem",
|
||||
{ err: 127, cause: "incendiary underwear" }
|
||||
);
|
||||
logger.flush();
|
||||
return events;
|
||||
},
|
||||
@ -95,9 +101,9 @@ vows.describe('log4js').addBatch({
|
||||
assert.equal(events.length, 6);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
},
|
||||
|
||||
|
||||
'getLogger': {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js');
|
||||
@ -106,7 +112,7 @@ vows.describe('log4js').addBatch({
|
||||
logger.setLevel("DEBUG");
|
||||
return logger;
|
||||
},
|
||||
|
||||
|
||||
'should take a category and return a logger': function(logger) {
|
||||
assert.equal(logger.category, 'tests');
|
||||
assert.equal(logger.level.toString(), "DEBUG");
|
||||
@ -129,18 +135,18 @@ vows.describe('log4js').addBatch({
|
||||
logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" });
|
||||
return events;
|
||||
},
|
||||
|
||||
|
||||
'should emit log events': function(events) {
|
||||
assert.equal(events[0].level.toString(), 'DEBUG');
|
||||
assert.equal(events[0].data[0], 'Debug event');
|
||||
assert.instanceOf(events[0].startTime, Date);
|
||||
},
|
||||
|
||||
|
||||
'should not emit events of a lower level': function(events) {
|
||||
assert.equal(events.length, 4);
|
||||
assert.equal(events[1].level.toString(), 'WARN');
|
||||
},
|
||||
|
||||
|
||||
'should include the error if passed in': function(events) {
|
||||
assert.instanceOf(events[2].data[1], Error);
|
||||
assert.equal(events[2].data[1].message, 'Pants are on fire!');
|
||||
@ -150,6 +156,7 @@ vows.describe('log4js').addBatch({
|
||||
|
||||
'when shutdown is called': {
|
||||
topic: function() {
|
||||
var callback = this.callback;
|
||||
var events = {
|
||||
appenderShutdownCalled: false,
|
||||
shutdownCallbackCalled: false
|
||||
@ -173,9 +180,6 @@ vows.describe('log4js').addBatch({
|
||||
}
|
||||
}
|
||||
),
|
||||
shutdownCallback = function() {
|
||||
events.shutdownCallbackCalled = true;
|
||||
},
|
||||
config = { appenders:
|
||||
[ { "type" : "file",
|
||||
"filename" : "cheesy-wotsits.log",
|
||||
@ -186,11 +190,13 @@ vows.describe('log4js').addBatch({
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
log4js.shutdown(shutdownCallback);
|
||||
// Re-enable log writing so other tests that use logger are not
|
||||
// affected.
|
||||
require('../lib/logger').enableAllLogWrites();
|
||||
return events;
|
||||
log4js.shutdown(function shutdownCallback() {
|
||||
events.shutdownCallbackCalled = true;
|
||||
// Re-enable log writing so other tests that use logger are not
|
||||
// affected.
|
||||
require('../lib/logger').enableAllLogWrites();
|
||||
callback(null, events);
|
||||
});
|
||||
},
|
||||
|
||||
'should invoke appender shutdowns': function(events) {
|
||||
@ -201,7 +207,7 @@ vows.describe('log4js').addBatch({
|
||||
assert.ok(events.shutdownCallbackCalled);
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
'invalid configuration': {
|
||||
'should throw an exception': function() {
|
||||
assert.throws(function() {
|
||||
@ -209,15 +215,15 @@ vows.describe('log4js').addBatch({
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
'configuration when passed as object': {
|
||||
topic: function() {
|
||||
var appenderConfig,
|
||||
var appenderConfig,
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/file':
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/file':
|
||||
{
|
||||
name: "file",
|
||||
appender: function() {},
|
||||
@ -228,8 +234,8 @@ vows.describe('log4js').addBatch({
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
config = { appenders:
|
||||
),
|
||||
config = { appenders:
|
||||
[ { "type" : "file",
|
||||
"filename" : "cheesy-wotsits.log",
|
||||
"maxLogSize" : 1024,
|
||||
@ -248,10 +254,10 @@ vows.describe('log4js').addBatch({
|
||||
'configuration that causes an error': {
|
||||
topic: function() {
|
||||
var log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/file':
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/file':
|
||||
{
|
||||
name: "file",
|
||||
appender: function() {},
|
||||
@ -261,8 +267,8 @@ vows.describe('log4js').addBatch({
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
config = { appenders:
|
||||
),
|
||||
config = { appenders:
|
||||
[ { "type" : "file",
|
||||
"filename" : "cheesy-wotsits.log",
|
||||
"maxLogSize" : 1024,
|
||||
@ -274,7 +280,7 @@ vows.describe('log4js').addBatch({
|
||||
log4js.configure(config);
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
}
|
||||
},
|
||||
'should wrap error in a meaningful message': function(e) {
|
||||
assert.ok(e.message.indexOf('log4js configuration problem for') > -1);
|
||||
@ -283,17 +289,17 @@ vows.describe('log4js').addBatch({
|
||||
|
||||
'configuration when passed as filename': {
|
||||
topic: function() {
|
||||
var appenderConfig,
|
||||
configFilename,
|
||||
var appenderConfig,
|
||||
configFilename,
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
'../lib/log4js',
|
||||
{ requires:
|
||||
{ 'fs':
|
||||
{ statSync:
|
||||
{ statSync:
|
||||
function() {
|
||||
return { mtime: Date.now() };
|
||||
},
|
||||
readFileSync:
|
||||
readFileSync:
|
||||
function(filename) {
|
||||
configFilename = filename;
|
||||
return JSON.stringify({
|
||||
@ -304,14 +310,14 @@ vows.describe('log4js').addBatch({
|
||||
]
|
||||
});
|
||||
},
|
||||
readdirSync:
|
||||
readdirSync:
|
||||
function() {
|
||||
return ['file'];
|
||||
}
|
||||
},
|
||||
'./appenders/file':
|
||||
{ name: "file",
|
||||
appender: function() {},
|
||||
},
|
||||
'./appenders/file':
|
||||
{ name: "file",
|
||||
appender: function() {},
|
||||
configure: function(configuration) {
|
||||
appenderConfig = configuration;
|
||||
return function() {};
|
||||
@ -333,21 +339,21 @@ vows.describe('log4js').addBatch({
|
||||
|
||||
'with no appenders defined' : {
|
||||
topic: function() {
|
||||
var logger,
|
||||
that = this,
|
||||
var logger,
|
||||
that = this,
|
||||
fakeConsoleAppender = {
|
||||
name: "console",
|
||||
name: "console",
|
||||
appender: function() {
|
||||
return function(evt) {
|
||||
that.callback(null, evt);
|
||||
};
|
||||
},
|
||||
},
|
||||
configure: function() {
|
||||
return fakeConsoleAppender.appender();
|
||||
}
|
||||
},
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/console': fakeConsoleAppender
|
||||
@ -370,8 +376,8 @@ vows.describe('log4js').addBatch({
|
||||
},
|
||||
'without a category': {
|
||||
'should register the function as a listener for all loggers': function (log4js) {
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; },
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; },
|
||||
logger = log4js.getLogger("tests");
|
||||
|
||||
log4js.addAppender(appender);
|
||||
@ -382,19 +388,19 @@ vows.describe('log4js').addBatch({
|
||||
},
|
||||
'if an appender for a category is defined': {
|
||||
'should register for that category': function (log4js) {
|
||||
var otherEvent,
|
||||
appenderEvent,
|
||||
var otherEvent,
|
||||
appenderEvent,
|
||||
cheeseLogger;
|
||||
|
||||
|
||||
log4js.addAppender(function (evt) { appenderEvent = evt; });
|
||||
log4js.addAppender(function (evt) { otherEvent = evt; }, 'cheese');
|
||||
|
||||
|
||||
cheeseLogger = log4js.getLogger('cheese');
|
||||
cheeseLogger.debug('This is a test');
|
||||
assert.deepEqual(appenderEvent, otherEvent);
|
||||
assert.equal(otherEvent.data[0], 'This is a test');
|
||||
assert.equal(otherEvent.categoryName, 'cheese');
|
||||
|
||||
|
||||
otherEvent = undefined;
|
||||
appenderEvent = undefined;
|
||||
log4js.getLogger('pants').debug("this should not be propagated to otherEvent");
|
||||
@ -403,58 +409,58 @@ vows.describe('log4js').addBatch({
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
'with a category': {
|
||||
'should only register the function as a listener for that category': function(log4js) {
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; },
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; },
|
||||
logger = log4js.getLogger("tests");
|
||||
|
||||
log4js.addAppender(appender, 'tests');
|
||||
logger.debug('this is a category test');
|
||||
assert.equal(appenderEvent.data[0], 'this is a category test');
|
||||
|
||||
|
||||
appenderEvent = undefined;
|
||||
log4js.getLogger('some other category').debug('Cheese');
|
||||
assert.isUndefined(appenderEvent);
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
'with multiple categories': {
|
||||
'should register the function as a listener for all the categories': function(log4js) {
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; },
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; },
|
||||
logger = log4js.getLogger('tests');
|
||||
|
||||
log4js.addAppender(appender, 'tests', 'biscuits');
|
||||
|
||||
|
||||
logger.debug('this is a test');
|
||||
assert.equal(appenderEvent.data[0], 'this is a test');
|
||||
appenderEvent = undefined;
|
||||
|
||||
|
||||
var otherLogger = log4js.getLogger('biscuits');
|
||||
otherLogger.debug("mmm... garibaldis");
|
||||
assert.equal(appenderEvent.data[0], "mmm... garibaldis");
|
||||
|
||||
|
||||
appenderEvent = undefined;
|
||||
|
||||
|
||||
log4js.getLogger("something else").debug("pants");
|
||||
assert.isUndefined(appenderEvent);
|
||||
},
|
||||
'should register the function when the list of categories is an array': function(log4js) {
|
||||
var appenderEvent,
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; };
|
||||
|
||||
log4js.addAppender(appender, ['tests', 'pants']);
|
||||
|
||||
|
||||
log4js.getLogger('tests').debug('this is a test');
|
||||
assert.equal(appenderEvent.data[0], 'this is a test');
|
||||
|
||||
|
||||
appenderEvent = undefined;
|
||||
|
||||
|
||||
log4js.getLogger('pants').debug("big pants");
|
||||
assert.equal(appenderEvent.data[0], "big pants");
|
||||
|
||||
|
||||
appenderEvent = undefined;
|
||||
|
||||
log4js.getLogger("something else").debug("pants");
|
||||
@ -462,17 +468,17 @@ vows.describe('log4js').addBatch({
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
'default setup': {
|
||||
topic: function() {
|
||||
var appenderEvents = [],
|
||||
fakeConsole = {
|
||||
'name': 'console',
|
||||
'name': 'console',
|
||||
'appender': function () {
|
||||
return function(evt) {
|
||||
appenderEvents.push(evt);
|
||||
};
|
||||
},
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
@ -492,43 +498,43 @@ vows.describe('log4js').addBatch({
|
||||
}
|
||||
),
|
||||
logger = log4js.getLogger('a-test');
|
||||
|
||||
|
||||
logger.debug("this is a test");
|
||||
globalConsole.log("this should not be logged");
|
||||
|
||||
|
||||
return appenderEvents;
|
||||
},
|
||||
|
||||
|
||||
'should configure a console appender': function(appenderEvents) {
|
||||
assert.equal(appenderEvents[0].data[0], 'this is a test');
|
||||
},
|
||||
|
||||
|
||||
'should not replace console.log with log4js version': function(appenderEvents) {
|
||||
assert.equal(appenderEvents.length, 1);
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
'console' : {
|
||||
topic: setupConsoleTest,
|
||||
|
||||
|
||||
'when replaceConsole called': {
|
||||
topic: function(test) {
|
||||
test.log4js.replaceConsole();
|
||||
|
||||
|
||||
test.fakeConsole.log("Some debug message someone put in a module");
|
||||
test.fakeConsole.debug("Some debug");
|
||||
test.fakeConsole.error("An error");
|
||||
test.fakeConsole.info("some info");
|
||||
test.fakeConsole.warn("a warning");
|
||||
|
||||
|
||||
test.fakeConsole.log("cheese (%s) and biscuits (%s)", "gouda", "garibaldis");
|
||||
test.fakeConsole.log({ lumpy: "tapioca" });
|
||||
test.fakeConsole.log("count %d", 123);
|
||||
test.fakeConsole.log("stringify %j", { lumpy: "tapioca" });
|
||||
|
||||
|
||||
return test.logEvents;
|
||||
},
|
||||
|
||||
|
||||
'should replace console.log methods with log4js ones': function(logEvents) {
|
||||
assert.equal(logEvents.length, 9);
|
||||
assert.equal(logEvents[0].data[0], "Some debug message someone put in a module");
|
||||
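The hunks above rework the console-replacement tests. The feature itself is driven by a single call; a minimal sketch (after this call, console output is routed through log4js, including the printf-style formats the test exercises):

var log4js = require('log4js');

log4js.replaceConsole();   // console.log/debug/info/warn/error now go through log4js
console.log('cheese (%s) and biscuits (%s)', 'gouda', 'garibaldis');
console.error('An error');
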
@ -590,7 +596,7 @@ vows.describe('log4js').addBatch({
|
||||
test.fakeConsole.debug("Some debug");
|
||||
return test.logEvents;
|
||||
},
|
||||
|
||||
|
||||
'should allow for turning on console replacement': function (logEvents) {
|
||||
assert.equal(logEvents.length, 1);
|
||||
assert.equal(logEvents[0].level.toString(), "DEBUG");
|
||||
@ -603,13 +609,13 @@ vows.describe('log4js').addBatch({
|
||||
var logEvent,
|
||||
firstLog4js = require('../lib/log4js'),
|
||||
secondLog4js;
|
||||
|
||||
|
||||
firstLog4js.clearAppenders();
|
||||
firstLog4js.addAppender(function(evt) { logEvent = evt; });
|
||||
|
||||
|
||||
secondLog4js = require('../lib/log4js');
|
||||
secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js");
|
||||
|
||||
|
||||
return logEvent;
|
||||
},
|
||||
'should maintain appenders between requires': function (logEvent) {
|
||||
|
||||
@ -1,18 +1,18 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, log4js = require('../lib/log4js')
|
||||
, sandbox = require('sandboxed-module')
|
||||
;
|
||||
function setupLogging(category, options) {
|
||||
var msgs = [];
|
||||
|
||||
|
||||
var fakeLoggly = {
|
||||
createClient: function (options) {
|
||||
createClient: function(options) {
|
||||
return {
|
||||
config: options,
|
||||
log: function (msg, tags) {
|
||||
log: function(msg, tags) {
|
||||
msgs.push({
|
||||
msg: msg,
|
||||
tags: tags
|
||||
@ -50,7 +50,7 @@ function setupLogging(category, options) {
|
||||
});
|
||||
|
||||
log4js.addAppender(logglyModule.configure(options), category);
|
||||
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
loggly: fakeLoggly,
|
||||
@ -61,22 +61,50 @@ function setupLogging(category, options) {
|
||||
}
|
||||
|
||||
log4js.clearAppenders();
|
||||
|
||||
function setupTaggedLogging() {
|
||||
return setupLogging('loggly', {
|
||||
token: 'your-really-long-input-token',
|
||||
subdomain: 'your-subdomain',
|
||||
tags: ['loggly-tag1', 'loggly-tag2', 'loggly-tagn']
|
||||
});
|
||||
}
|
||||
|
||||
vows.describe('log4js logglyAppender').addBatch({
|
||||
'minimal config': {
|
||||
'with minimal config': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('loggly', {
|
||||
token: 'your-really-long-input-token',
|
||||
subdomain: 'your-subdomain',
|
||||
tags: ['loggly-tag1', 'loggly-tag2', 'loggly-tagn']
|
||||
});
|
||||
|
||||
setup.logger.log('trace', 'Log event #1');
|
||||
var setup = setupTaggedLogging();
|
||||
setup.logger.log('trace', 'Log event #1', 'Log 2', { tags: ['tag1', 'tag2'] });
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (topic) {
|
||||
//console.log('topic', topic);
|
||||
'has a results.length of 1': function(topic) {
|
||||
assert.equal(topic.results.length, 1);
|
||||
},
|
||||
'has a result msg with both args concatenated': function(topic) {
|
||||
assert.equal(topic.results[0].msg.msg, 'Log event #1 Log 2');
|
||||
},
|
||||
'has a result tags with the arg that contains tags': function(topic) {
|
||||
assert.deepEqual(topic.results[0].tags, ['tag1', 'tag2']);
|
||||
}
|
||||
}
|
||||
}).addBatch({
|
||||
'config with object with tags and other keys': {
|
||||
topic: function() {
|
||||
var setup = setupTaggedLogging();
|
||||
|
||||
// ignore this tags object b/c there are 2 keys
|
||||
setup.logger.log('trace', 'Log event #1', { other: 'other', tags: ['tag1', 'tag2'] });
|
||||
return setup;
|
||||
},
|
||||
'has a results.length of 1': function(topic) {
|
||||
assert.equal(topic.results.length, 1);
|
||||
},
|
||||
'has a result msg with the args concatenated': function(topic) {
|
||||
assert.equal(topic.results[0].msg.msg,
|
||||
'Log event #1 { other: \'other\', tags: [ \'tag1\', \'tag2\' ] }');
|
||||
},
|
||||
'has a result tags with the arg that contains no tags': function(topic) {
|
||||
assert.deepEqual(topic.results[0].tags, []);
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
|
||||
@ -1,5 +1,4 @@
|
||||
"use strict";
|
||||
var sys = require("sys");
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, log4js = require('../lib/log4js')
|
||||
@ -8,7 +7,7 @@ var vows = require('vows')
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var udpSent = {};
|
||||
|
||||
|
||||
var fakeDgram = {
|
||||
createSocket: function (type) {
|
||||
return {
|
||||
@ -32,7 +31,7 @@ function setupLogging(category, options) {
|
||||
});
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(logstashModule.configure(options), category);
|
||||
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
results: udpSent
|
||||
@ -42,7 +41,7 @@ function setupLogging(category, options) {
|
||||
vows.describe('logstashUDP appender').addBatch({
|
||||
'when logging with logstash via UDP': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('logstashUDP', {
|
||||
var setup = setupLogging('myCategory', {
|
||||
"host": "127.0.0.1",
|
||||
"port": 10001,
|
||||
"type": "logstashUDP",
|
||||
@ -102,5 +101,26 @@ vows.describe('logstashUDP appender').addBatch({
|
||||
assert.equal(json.type, 'myLogger');
|
||||
assert.equal(JSON.stringify(json.fields), JSON.stringify({'level': 'TRACE'}));
|
||||
}
|
||||
},
|
||||
|
||||
'when extra fields provided': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('myLogger', {
|
||||
"host": "127.0.0.1",
|
||||
"port": 10001,
|
||||
"type": "logstashUDP",
|
||||
"category": "myLogger",
|
||||
"layout": {
|
||||
"type": "dummy"
|
||||
}
|
||||
});
|
||||
setup.logger.log('trace', 'Log event #1', {'extra1': 'value1', 'extra2': 'value2'});
|
||||
return setup;
|
||||
},
'they should be added to fields structure': function (topic) {
|
||||
var json = JSON.parse(topic.results.buffer.toString());
|
||||
var fields = {'extra1': 'value1', 'extra2': 'value2', 'level': 'TRACE'};
|
||||
assert.equal(JSON.stringify(json.fields), JSON.stringify(fields));
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
||||
|
||||
190
test/mailgunAppender-test.js
Normal file
@ -0,0 +1,190 @@
|
||||
"use strict";
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var log4js = require('../lib/log4js');
|
||||
var sandbox = require('sandboxed-module');
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var msgs = [];
|
||||
|
||||
var mailgunCredentials = {
|
||||
apiKey: options.apikey,
|
||||
domain: options.domain
|
||||
};
|
||||
|
||||
var fakeMailgun = function (conf) {
|
||||
return {
|
||||
messages: function () {
|
||||
return {
|
||||
config: options,
|
||||
send: function (data, callback) {
|
||||
msgs.push(data);
|
||||
callback(false, {status:"OK"});
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
var fakeLayouts = {
|
||||
layout: function (type, config) {
|
||||
this.type = type;
|
||||
this.config = config;
|
||||
return log4js.layouts.messagePassThroughLayout;
|
||||
},
|
||||
basicLayout: log4js.layouts.basicLayout,
|
||||
messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
|
||||
};
|
||||
|
||||
var fakeConsole = {
|
||||
errors: [],
|
||||
logs: [],
|
||||
error: function (msg, value) {
|
||||
this.errors.push({msg: msg, value: value});
|
||||
},
|
||||
log: function (msg, value) {
|
||||
this.logs.push({msg: msg, value: value});
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
var mailgunModule = sandbox.require('../lib/appenders/mailgun', {
|
||||
requires: {
|
||||
'mailgun-js': fakeMailgun,
|
||||
'../layouts': fakeLayouts
|
||||
},
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
log4js.addAppender(mailgunModule.configure(options), category);
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
mailer: fakeMailgun,
|
||||
layouts: fakeLayouts,
|
||||
console: fakeConsole,
|
||||
mails: msgs,
|
||||
credentials: mailgunCredentials
|
||||
};
|
||||
}
|
||||
|
||||
function checkMessages(result) {
|
||||
for (var i = 0; i < result.mails.length; ++i) {
|
||||
assert.equal(result.mails[i].from, 'sender@domain.com');
|
||||
assert.equal(result.mails[i].to, 'recepient@domain.com');
|
||||
assert.equal(result.mails[i].subject, 'This is subject');
|
||||
assert.ok(new RegExp('.+Log event #' + (i + 1)).test(result.mails[i].text));
|
||||
}
|
||||
}
|
||||
|
||||
log4js.clearAppenders();
|
||||
|
||||
vows.describe('log4js mailgunAppender').addBatch({
|
||||
'mailgun setup': {
|
||||
topic: setupLogging('mailgun setup', {
|
||||
apikey: 'APIKEY',
|
||||
domain: 'DOMAIN',
|
||||
from: 'sender@domain.com',
|
||||
to: 'recepient@domain.com',
|
||||
subject: 'This is subject'
|
||||
}),
|
||||
'mailgun credentials should match': function(result){
|
||||
assert.equal(result.credentials.apiKey, 'APIKEY');
|
||||
assert.equal(result.credentials.domain, 'DOMAIN');
|
||||
}
|
||||
},
|
||||
|
||||
'basic usage': {
|
||||
topic: function(){
|
||||
var setup = setupLogging('basic usage', {
|
||||
apikey: 'APIKEY',
|
||||
domain: 'DOMAIN',
|
||||
from: 'sender@domain.com',
|
||||
to: 'recepient@domain.com',
|
||||
subject: 'This is subject'
|
||||
});
|
||||
|
||||
setup.logger.info("Log event #1");
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.mails.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'config with layout': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('config with layout', {
|
||||
layout: {
|
||||
type: "tester"
|
||||
}
|
||||
});
|
||||
return setup;
|
||||
},
|
||||
'should configure layout': function (result) {
|
||||
assert.equal(result.layouts.type, 'tester');
|
||||
}
|
||||
},
|
||||
'error when sending email': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('separate email for each event', {
|
||||
apikey: 'APIKEY',
|
||||
domain: 'DOMAIN',
|
||||
from: 'sender@domain.com',
|
||||
to: 'recepient@domain.com',
|
||||
subject: 'This is subject'
|
||||
});
|
||||
|
||||
setup.mailer.messages = function () {
|
||||
return {
|
||||
send: function (msg, cb) {
|
||||
cb({msg: "log4js.mailgunAppender - Error happened"}, null);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
setup.logger.info("This will break");
|
||||
return setup.console;
|
||||
},
|
||||
'should be logged to console': function (cons) {
|
||||
assert.equal(cons.errors.length, 1);
|
||||
assert.equal(cons.errors[0].msg, 'log4js.mailgunAppender - Error happened');
|
||||
}
|
||||
},
|
||||
'separate email for each event': {
|
||||
topic: function () {
|
||||
var self = this;
|
||||
var setup = setupLogging('separate email for each event', {
|
||||
apikey: 'APIKEY',
|
||||
domain: 'DOMAIN',
|
||||
from: 'sender@domain.com',
|
||||
to: 'recepient@domain.com',
|
||||
subject: 'This is subject'
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1100);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 3000);
|
||||
},
|
||||
'there should be three messages': function (result) {
|
||||
assert.equal(result.mails.length, 3);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
||||
@ -66,7 +66,7 @@ vows.describe('Multiprocess Appender').addBatch({
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'worker', loggerPort: 1234, loggerHost: 'pants' });
|
||||
|
||||
|
||||
//don't need a proper log event for the worker tests
|
||||
appender('before connect');
|
||||
fakeNet.cbs.connect();
|
||||
@ -76,7 +76,7 @@ vows.describe('Multiprocess Appender').addBatch({
|
||||
fakeNet.cbs.connect();
|
||||
appender('after error, after connect');
|
||||
appender(new Error('Error test'));
|
||||
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should open a socket to the loggerPort and loggerHost': function(net) {
|
||||
@ -101,10 +101,16 @@ vows.describe('Multiprocess Appender').addBatch({
|
||||
assert.equal(net.createConnectionCalled, 2);
|
||||
},
|
||||
'should serialize an Error correctly': function(net) {
|
||||
assert(JSON.parse(net.data[8]).stack, "Expected:\n\n" + net.data[8] + "\n\n to have a 'stack' property");
|
||||
assert(
|
||||
JSON.parse(net.data[8]).stack,
|
||||
"Expected:\n\n" + net.data[8] + "\n\n to have a 'stack' property"
|
||||
);
|
||||
var actual = JSON.parse(net.data[8]).stack;
|
||||
var expectedRegex = /^Error: Error test/;
|
||||
assert(actual.match(expectedRegex), "Expected: \n\n " + actual + "\n\n to match " + expectedRegex);
|
||||
assert(
|
||||
actual.match(expectedRegex),
|
||||
"Expected: \n\n " + actual + "\n\n to match " + expectedRegex
|
||||
);
|
||||
|
||||
}
|
||||
},
|
||||
@ -119,7 +125,7 @@ vows.describe('Multiprocess Appender').addBatch({
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'worker' });
|
||||
|
||||
|
||||
//don't need a proper log event for the worker tests
|
||||
appender('before connect');
|
||||
fakeNet.cbs.connect();
|
||||
@ -130,7 +136,7 @@ vows.describe('Multiprocess Appender').addBatch({
|
||||
appender('after close, before connect');
|
||||
fakeNet.cbs.connect();
|
||||
appender('after close, after connect');
|
||||
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should attempt to re-open the socket': function(net) {
|
||||
@ -154,7 +160,7 @@ vows.describe('Multiprocess Appender').addBatch({
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'worker' });
|
||||
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should open a socket to localhost:5000': function(net) {
|
||||
@ -177,9 +183,9 @@ vows.describe('Multiprocess Appender').addBatch({
|
||||
loggerPort: 1234,
|
||||
actualAppender: fakeNet.fakeAppender.bind(fakeNet)
|
||||
});
|
||||
|
||||
|
||||
appender('this should be sent to the actual appender directly');
|
||||
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should listen for log messages on loggerPort and loggerHost': function(net) {
|
||||
@ -195,7 +201,7 @@ vows.describe('Multiprocess Appender').addBatch({
|
||||
{ level: { level: 10000, levelStr: 'DEBUG' }
|
||||
, data: ['some debug']}
|
||||
) + '__LOG4JS__';
|
||||
|
||||
|
||||
net.cbs.data(
|
||||
JSON.stringify(
|
||||
{ level: { level: 40000, levelStr: 'ERROR' }
|
||||
@ -253,7 +259,7 @@ vows.describe('Multiprocess Appender').addBatch({
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'master' });
|
||||
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should listen for log messages on localhost:5000': function(net) {
|
||||
@ -296,7 +302,7 @@ vows.describe('Multiprocess Appender').addBatch({
|
||||
);
|
||||
|
||||
return results;
|
||||
|
||||
|
||||
},
|
||||
'should load underlying appender for master': function(results) {
|
||||
assert.equal(results.appenderLoaded, 'madeupappender');
|
||||
|
||||
138
test/newLevel-test.js
Normal file
@ -0,0 +1,138 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, Level = require('../lib/levels')
|
||||
, log4js = require('../lib/log4js')
|
||||
, loggerModule = require('../lib/logger')
|
||||
, Logger = loggerModule.Logger;
|
||||
|
||||
vows.describe('../lib/logger').addBatch({
|
||||
'creating a new log level': {
|
||||
topic: function () {
|
||||
Level.forName("DIAG", 6000);
|
||||
return new Logger();
|
||||
},
|
||||
|
||||
'should export new log level in levels module': function (logger) {
|
||||
assert.isDefined(Level.DIAG);
|
||||
assert.equal(Level.DIAG.levelStr, "DIAG");
|
||||
assert.equal(Level.DIAG.level, 6000);
|
||||
},
|
||||
|
||||
'should create named function on logger prototype': function(logger) {
|
||||
assert.isFunction(logger.diag);
|
||||
},
|
||||
|
||||
'should create isLevelEnabled function on logger prototype': function(logger) {
|
||||
assert.isFunction(logger.isDiagEnabled);
|
||||
},
|
||||
},
|
||||
|
||||
'creating a new log level with underscores': {
|
||||
topic: function () {
|
||||
Level.forName("NEW_LEVEL_OTHER", 6000);
|
||||
return new Logger();
|
||||
},
|
||||
|
||||
'should export new log level to levels module': function (logger) {
|
||||
assert.isDefined(Level.NEW_LEVEL_OTHER);
|
||||
assert.equal(Level.NEW_LEVEL_OTHER.levelStr, "NEW_LEVEL_OTHER");
|
||||
assert.equal(Level.NEW_LEVEL_OTHER.level, 6000);
|
||||
},
|
||||
|
||||
'should create named function on logger prototype in camel case': function(logger) {
|
||||
assert.isFunction(logger.newLevelOther);
|
||||
},
|
||||
|
||||
'should create named isLevelEnabled function on logger prototype in camel case':
|
||||
function(logger) {
|
||||
assert.isFunction(logger.isNewLevelOtherEnabled);
|
||||
}
|
||||
},
|
||||
|
||||
'creating log events containing newly created log level': {
|
||||
topic: function() {
|
||||
var events = [],
|
||||
logger = new Logger();
|
||||
logger.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
|
||||
logger.log(Level.forName("LVL1", 6000), "Event 1");
|
||||
logger.log(Level.getLevel("LVL1"), "Event 2");
|
||||
logger.log("LVL1", "Event 3");
|
||||
logger.lvl1("Event 4");
|
||||
|
||||
logger.setLevel(Level.forName("LVL2", 7000));
|
||||
logger.lvl1("Event 5");
|
||||
|
||||
return events;
|
||||
},
|
||||
|
||||
'should show log events with new log level': function(events) {
|
||||
assert.equal(events[0].level.toString(), "LVL1");
|
||||
assert.equal(events[0].data[0], "Event 1");
|
||||
|
||||
assert.equal(events[1].level.toString(), "LVL1");
|
||||
assert.equal(events[1].data[0], "Event 2");
|
||||
|
||||
assert.equal(events[2].level.toString(), "LVL1");
|
||||
assert.equal(events[2].data[0], "Event 3");
|
||||
|
||||
assert.equal(events[3].level.toString(), "LVL1");
|
||||
assert.equal(events[3].data[0], "Event 4");
|
||||
},
|
||||
|
||||
'should not be present if min log level is greater than newly created level':
|
||||
function(events) {
|
||||
assert.equal(events.length, 4);
|
||||
}
|
||||
},
|
||||
|
||||
'creating a new log level with incorrect parameters': {
|
||||
topic: function() {
|
||||
log4js.levels.forName(9000, "FAIL_LEVEL_1");
|
||||
log4js.levels.forName("FAIL_LEVEL_2");
|
||||
return new Logger();
|
||||
},
|
||||
|
||||
'should fail to create the level': function(logger) {
|
||||
assert.isUndefined(Level.FAIL_LEVEL_1);
|
||||
assert.isUndefined(Level.FAIL_LEVEL_2);
|
||||
}
|
||||
},
|
||||
|
||||
'calling log with an undefined log level': {
|
||||
topic: function() {
|
||||
var events = [],
|
||||
logger = new Logger();
|
||||
logger.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
|
||||
logger.log("LEVEL_DOES_NEXT_EXIST", "Event 1");
|
||||
logger.log(Level.forName("LEVEL_DOES_NEXT_EXIST"), "Event 2");
|
||||
|
||||
return events;
|
||||
},
|
||||
|
||||
'should fallback to the default log level (INFO)': function(events) {
|
||||
assert.equal(events[0].level.toString(), "INFO");
|
||||
assert.equal(events[1].level.toString(), "INFO");
|
||||
}
|
||||
},
|
||||
|
||||
'creating a new level with an existing level name': {
|
||||
topic: function() {
|
||||
var events = [],
|
||||
logger = new Logger();
|
||||
logger.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
|
||||
logger.log(log4js.levels.forName("MY_LEVEL", 9000), "Event 1");
|
||||
logger.log(log4js.levels.forName("MY_LEVEL", 8000), "Event 1");
|
||||
|
||||
return events;
|
||||
},
|
||||
|
||||
'should override the existing log level': function(events) {
|
||||
assert.equal(events[0].level.level, 9000);
|
||||
assert.equal(events[1].level.level, 8000);
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
|
||||
@ -1,6 +1,8 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, util = require('util')
|
||||
, EE = require('events').EventEmitter
|
||||
, levels = require('../lib/levels');
|
||||
|
||||
function MockLogger() {
|
||||
@ -21,7 +23,7 @@ function MockLogger() {
|
||||
}
|
||||
|
||||
function MockRequest(remoteAddr, method, originalUrl) {
|
||||
|
||||
|
||||
this.socket = { remoteAddress: remoteAddr };
|
||||
this.originalUrl = originalUrl;
|
||||
this.method = method;
|
||||
@ -31,13 +33,14 @@ function MockRequest(remoteAddr, method, originalUrl) {
|
||||
}
|
||||
|
||||
function MockResponse(statusCode) {
|
||||
|
||||
var r = this;
|
||||
this.statusCode = statusCode;
|
||||
|
||||
this.end = function(chunk, encoding) {
|
||||
|
||||
r.emit('finish');
|
||||
};
|
||||
}
|
||||
util.inherits(MockResponse, EE);
|
||||
|
||||
vows.describe('log4js connect logger').addBatch({
|
||||
'getConnectLoggerModule': {
|
||||
@ -61,10 +64,13 @@ vows.describe('log4js connect logger').addBatch({
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
},10);
|
||||
},
|
||||
'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
@ -81,10 +87,13 @@ vows.describe('log4js connect logger').addBatch({
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
},10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
@ -98,15 +107,18 @@ vows.describe('log4js connect logger').addBatch({
|
||||
var cl = clm.connectLogger(ml, {nolog: "\\.gif|\\.jpe?g"});
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
|
||||
'check unmatch url request (png)': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
@ -123,10 +135,13 @@ vows.describe('log4js connect logger').addBatch({
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
@ -136,10 +151,13 @@ vows.describe('log4js connect logger').addBatch({
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
@ -152,15 +170,18 @@ vows.describe('log4js connect logger').addBatch({
|
||||
var cl = clm.connectLogger(ml, {nolog: ["\\.gif", "\\.jpe?g"]});
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
|
||||
'check unmatch url request (png)': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
@ -177,10 +198,13 @@ vows.describe('log4js connect logger').addBatch({
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
@ -191,10 +215,13 @@ vows.describe('log4js connect logger').addBatch({
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // jpeg
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
@ -212,10 +239,13 @@ vows.describe('log4js connect logger').addBatch({
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
@ -232,10 +262,13 @@ vows.describe('log4js connect logger').addBatch({
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
@ -246,10 +279,13 @@ vows.describe('log4js connect logger').addBatch({
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // jpeg
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
|
||||
168
test/slackAppender-test.js
Normal file
@ -0,0 +1,168 @@
|
||||
"use strict";
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var log4js = require('../lib/log4js');
|
||||
var sandbox = require('sandboxed-module');
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var msgs = [];
|
||||
|
||||
var slackCredentials = {
|
||||
token: options.token,
|
||||
channel_id: options.channel_id,
|
||||
username: options.username,
|
||||
format: options.format,
|
||||
icon_url: options.icon_url
|
||||
};
|
||||
var fakeSlack = (function (key) {
|
||||
function constructor() {
|
||||
return {
|
||||
options: key,
|
||||
api: function (action, data, callback) {
|
||||
msgs.push(data);
|
||||
callback(false, {status: "sent"});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return constructor(key);
|
||||
});
|
||||
|
||||
var fakeLayouts = {
|
||||
layout: function (type, config) {
|
||||
this.type = type;
|
||||
this.config = config;
|
||||
return log4js.layouts.messagePassThroughLayout;
|
||||
},
|
||||
basicLayout: log4js.layouts.basicLayout,
|
||||
coloredLayout: log4js.layouts.coloredLayout,
|
||||
messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
|
||||
};
|
||||
|
||||
var fakeConsole = {
|
||||
errors: [],
|
||||
logs: [],
|
||||
error: function (msg, value) {
|
||||
this.errors.push({msg: msg, value: value});
|
||||
},
|
||||
log: function (msg, value) {
|
||||
this.logs.push({msg: msg, value: value});
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
var slackModule = sandbox.require('../lib/appenders/slack', {
|
||||
requires: {
|
||||
'slack-node': fakeSlack,
|
||||
'../layouts': fakeLayouts
|
||||
},
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
log4js.addAppender(slackModule.configure(options), category);
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
mailer: fakeSlack,
|
||||
layouts: fakeLayouts,
|
||||
console: fakeConsole,
|
||||
messages: msgs,
|
||||
credentials: slackCredentials
|
||||
};
|
||||
}
|
||||
|
||||
function checkMessages(result) {
|
||||
for (var i = 0; i < result.messages.length; ++i) {
|
||||
assert.equal(result.messages[i].channel, '#CHANNEL');
|
||||
assert.equal(result.messages[i].username, 'USERNAME');
|
||||
assert.ok(new RegExp('.+Log event #' + (i + 1)).test(result.messages[i].text));
|
||||
}
|
||||
}
|
||||
|
||||
log4js.clearAppenders();
|
||||
|
||||
vows.describe('log4js slackAppender').addBatch({
|
||||
'slack setup': {
|
||||
topic: setupLogging('slack setup', {
|
||||
token: 'TOKEN',
|
||||
channel_id: "#CHANNEL",
|
||||
username: "USERNAME",
|
||||
format: "FORMAT",
|
||||
icon_url: "ICON_URL"
|
||||
}),
|
||||
'slack credentials should match': function (result) {
|
||||
assert.equal(result.credentials.token, 'TOKEN');
|
||||
assert.equal(result.credentials.channel_id, '#CHANNEL');
|
||||
assert.equal(result.credentials.username, 'USERNAME');
|
||||
assert.equal(result.credentials.format, 'FORMAT');
|
||||
assert.equal(result.credentials.icon_url, 'ICON_URL');
|
||||
}
|
||||
},
|
||||
|
||||
'basic usage': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('basic usage', {
|
||||
token: 'TOKEN',
|
||||
channel_id: "#CHANNEL",
|
||||
username: "USERNAME",
|
||||
format: "FORMAT",
|
||||
icon_url: "ICON_URL",
|
||||
});
|
||||
|
||||
setup.logger.info("Log event #1");
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.messages.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'config with layout': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('config with layout', {
|
||||
layout: {
|
||||
type: "tester"
|
||||
}
|
||||
});
|
||||
return setup;
|
||||
},
|
||||
'should configure layout': function (result) {
|
||||
assert.equal(result.layouts.type, 'tester');
|
||||
}
|
||||
},
|
||||
'separate notification for each event': {
|
||||
topic: function () {
|
||||
var self = this;
|
||||
var setup = setupLogging('separate notification for each event', {
|
||||
token: 'TOKEN',
|
||||
channel_id: "#CHANNEL",
|
||||
username: "USERNAME",
|
||||
format: "FORMAT",
|
||||
icon_url: "ICON_URL",
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1100);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 3000);
|
||||
},
|
||||
'there should be three messages': function (result) {
|
||||
assert.equal(result.messages.length, 3);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
@ -1,231 +1,318 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, log4js = require('../lib/log4js')
|
||||
, sandbox = require('sandboxed-module')
|
||||
;
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var log4js = require('../lib/log4js');
|
||||
var sandbox = require('sandboxed-module');
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var msgs = [];
|
||||
var msgs = [];
|
||||
|
||||
var fakeMailer = {
|
||||
createTransport: function (name, options) {
|
||||
return {
|
||||
config: options,
|
||||
sendMail: function (msg, callback) {
|
||||
msgs.push(msg);
|
||||
callback(null, true);
|
||||
var fakeMailer = {
|
||||
createTransport: function (name, options) {
|
||||
return {
|
||||
config: options,
|
||||
sendMail: function (msg, callback) {
|
||||
msgs.push(msg);
|
||||
callback(null, true);
|
||||
},
|
||||
close: function () {
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
var fakeLayouts = {
|
||||
layout: function (type, config) {
|
||||
this.type = type;
|
||||
this.config = config;
|
||||
return log4js.layouts.messagePassThroughLayout;
|
||||
},
|
||||
close: function() {}
|
||||
};
|
||||
}
|
||||
};
|
||||
basicLayout: log4js.layouts.basicLayout,
|
||||
messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
|
||||
};
|
||||
|
||||
var fakeLayouts = {
|
||||
layout: function(type, config) {
|
||||
this.type = type;
|
||||
this.config = config;
|
||||
return log4js.layouts.messagePassThroughLayout;
|
||||
},
|
||||
basicLayout: log4js.layouts.basicLayout,
|
||||
messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
|
||||
};
|
||||
var fakeConsole = {
|
||||
errors: [],
|
||||
error: function (msg, value) {
|
||||
this.errors.push({msg: msg, value: value});
|
||||
}
|
||||
};
|
||||
|
||||
var fakeConsole = {
|
||||
errors: [],
|
||||
error: function(msg, value) {
|
||||
this.errors.push({ msg: msg, value: value });
|
||||
}
|
||||
};
|
||||
var fakeTransportPlugin = function () {
|
||||
};
|
||||
|
||||
var smtpModule = sandbox.require('../lib/appenders/smtp', {
|
||||
requires: {
|
||||
'nodemailer': fakeMailer,
|
||||
'../layouts': fakeLayouts
|
||||
},
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
});
|
||||
var smtpModule = sandbox.require('../lib/appenders/smtp', {
|
||||
requires: {
|
||||
'nodemailer': fakeMailer,
|
||||
'nodemailer-sendmail-transport': fakeTransportPlugin,
|
||||
'nodemailer-smtp-transport': fakeTransportPlugin,
|
||||
'../layouts': fakeLayouts
|
||||
},
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
});
|
||||
|
||||
log4js.addAppender(smtpModule.configure(options), category);
|
||||
log4js.addAppender(smtpModule.configure(options), category);
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
mailer: fakeMailer,
|
||||
layouts: fakeLayouts,
|
||||
console: fakeConsole,
|
||||
results: msgs
|
||||
};
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
mailer: fakeMailer,
|
||||
layouts: fakeLayouts,
|
||||
console: fakeConsole,
|
||||
results: msgs
|
||||
};
|
||||
}
|
||||
|
||||
function checkMessages (result, sender, subject) {
|
||||
for (var i = 0; i < result.results.length; ++i) {
|
||||
assert.equal(result.results[i].from, sender);
|
||||
assert.equal(result.results[i].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[i].subject, subject ? subject : 'Log event #' + (i+1));
|
||||
assert.ok(new RegExp('.+Log event #' + (i+1) + '\n$').test(result.results[i].text));
|
||||
}
|
||||
function checkMessages(result, sender, subject) {
|
||||
for (var i = 0; i < result.results.length; ++i) {
|
||||
assert.equal(result.results[i].from, sender);
|
||||
assert.equal(result.results[i].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[i].subject, subject ? subject : 'Log event #' + (i + 1));
|
||||
assert.ok(new RegExp('.+Log event #' + (i + 1) + '\n$').test(result.results[i].text));
|
||||
}
|
||||
}
|
||||
|
||||
log4js.clearAppenders();
|
||||
vows.describe('log4js smtpAppender').addBatch({
|
||||
'minimal config': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('minimal config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
transport: "SMTP",
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
'minimal config': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('minimal config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'fancy config': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('fancy config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sender: 'sender@domain.com',
|
||||
subject: 'This is subject',
|
||||
transport: "SMTP",
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
},
|
||||
'fancy config': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('fancy config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sender: 'sender@domain.com',
|
||||
subject: 'This is subject',
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result, 'sender@domain.com', 'This is subject');
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result, 'sender@domain.com', 'This is subject');
|
||||
}
|
||||
},
|
||||
'config with layout': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('config with layout', {
|
||||
layout: {
|
||||
type: "tester"
|
||||
'config with layout': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('config with layout', {
|
||||
layout: {
|
||||
type: "tester"
|
||||
}
|
||||
});
|
||||
return setup;
|
||||
},
|
||||
'should configure layout': function (result) {
|
||||
assert.equal(result.layouts.type, 'tester');
|
||||
}
|
||||
});
|
||||
return setup;
|
||||
},
|
||||
'should configure layout': function(result) {
|
||||
assert.equal(result.layouts.type, 'tester');
|
||||
}
|
||||
},
|
||||
'separate email for each event': {
|
||||
topic: function() {
|
||||
var self = this;
|
||||
var setup = setupLogging('separate email for each event', {
|
||||
recipients: 'recipient@domain.com',
|
||||
transport: "SMTP",
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
'separate email for each event': {
|
||||
topic: function () {
|
||||
var self = this;
|
||||
var setup = setupLogging('separate email for each event', {
|
||||
recipients: 'recipient@domain.com',
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1100);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 3000);
|
||||
},
|
||||
'there should be three messages': function (result) {
|
||||
assert.equal(result.results.length, 3);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1100);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 3000);
|
||||
},
|
||||
'there should be three messages': function (result) {
|
||||
assert.equal(result.results.length, 3);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'multiple events in one email': {
|
||||
topic: function() {
|
||||
var self = this;
|
||||
var setup = setupLogging('multiple events in one email', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sendInterval: 1,
|
||||
transport: "SMTP",
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
'multiple events in one email': {
|
||||
topic: function () {
|
||||
var self = this;
|
||||
var setup = setupLogging('multiple events in one email', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sendInterval: 1,
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 100);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1500);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 3000);
|
||||
},
|
||||
'there should be two messages': function (result) {
|
||||
assert.equal(result.results.length, 2);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
assert.equal(result.results[0].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[0].subject, 'Log event #1');
|
||||
assert.equal(
|
||||
result.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length,
|
||||
2
|
||||
);
|
||||
assert.equal(result.results[1].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[1].subject, 'Log event #3');
|
||||
assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].text));
|
||||
}
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 100);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1500);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 3000);
|
||||
},
|
||||
'there should be two messages': function (result) {
|
||||
assert.equal(result.results.length, 2);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
assert.equal(result.results[0].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[0].subject, 'Log event #1');
|
||||
assert.equal(result.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length, 2);
|
||||
assert.equal(result.results[1].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[1].subject, 'Log event #3');
|
||||
assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].text));
|
||||
}
|
||||
},
|
||||
'error when sending email': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('error when sending email', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sendInterval: 0,
|
||||
transport: 'SMTP',
|
||||
SMTP: { port: 25, auth: { user: 'user@domain.com' } }
|
||||
});
|
||||
'error when sending email': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('error when sending email', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sendInterval: 0,
|
||||
SMTP: {port: 25, auth: {user: 'user@domain.com'}}
|
||||
});
|
||||
|
||||
setup.mailer.createTransport = function() {
|
||||
return {
|
||||
sendMail: function(msg, cb) {
|
||||
cb({ message: "oh noes" });
|
||||
},
|
||||
close: function() { }
|
||||
};
|
||||
};
|
||||
setup.mailer.createTransport = function () {
|
||||
return {
|
||||
sendMail: function (msg, cb) {
|
||||
cb({message: "oh noes"});
|
||||
},
|
||||
close: function () {
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
setup.logger.info("This will break");
|
||||
return setup.console;
|
||||
setup.logger.info("This will break");
|
||||
return setup.console;
|
||||
},
|
||||
'should be logged to console': function (cons) {
|
||||
assert.equal(cons.errors.length, 1);
|
||||
assert.equal(cons.errors[0].msg, "log4js.smtpAppender - Error happened");
|
||||
assert.equal(cons.errors[0].value.message, 'oh noes');
|
||||
}
|
||||
},
|
||||
'should be logged to console': function(cons) {
|
||||
assert.equal(cons.errors.length, 1);
|
||||
assert.equal(cons.errors[0].msg, "log4js.smtpAppender - Error happened");
|
||||
assert.equal(cons.errors[0].value.message, 'oh noes');
|
||||
'transport full config': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('transport full config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
transport: {
|
||||
plugin: 'sendmail',
|
||||
options: {
|
||||
path: '/usr/sbin/sendmail'
|
||||
}
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'transport no-options config': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('transport no-options config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
transport: {
|
||||
plugin: 'sendmail'
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'transport no-plugin config': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('transport no-plugin config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
transport: {
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'attachment config': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('attachment config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
attachment: {
|
||||
enable: true
|
||||
},
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
assert.equal(result.results[0].attachments.length, 1);
|
||||
var attachment = result.results[0].attachments[0];
|
||||
assert.equal(result.results[0].text, "See logs as attachment");
|
||||
assert.equal(attachment.filename, "default.log");
|
||||
assert.equal(attachment.contentType, "text/x-log");
|
||||
assert.ok(new RegExp('.+Log event #' + 1 + '\n$').test(attachment.content));
|
||||
}
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
|
||||
35
test/stderrAppender-test.js
Normal file
@ -0,0 +1,35 @@
"use strict";
var assert = require('assert')
, vows = require('vows')
, layouts = require('../lib/layouts')
, sandbox = require('sandboxed-module');

vows.describe('../lib/appenders/stderr').addBatch({
  'appender': {
    topic: function() {
      var messages = []
      , fakeProcess = {
          stderr: {
            write: function(msg) { messages.push(msg); }
          }
        }
      , appenderModule = sandbox.require(
          '../lib/appenders/stderr',
          {
            globals: {
              'process': fakeProcess
            }
          }
        )
      , appender = appenderModule.appender(layouts.messagePassThroughLayout);

      appender({ data: ["blah"] });
      return messages;
    },

    'should output to stderr': function(messages) {
      assert.equal(messages[0], 'blah\n');
    }
  }

}).exportTo(module);
@ -1,6 +1,5 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, async = require('async')
|
||||
, assert = require('assert')
|
||||
, events = require('events')
|
||||
, fs = require('fs')
|
||||
@ -119,19 +118,11 @@ vows.describe('RollingFileStream').addBatch({
|
||||
remove(__dirname + "/test-rolling-file-stream-write-more.1");
|
||||
var that = this
|
||||
, stream = new RollingFileStream(
|
||||
__dirname + "/test-rolling-file-stream-write-more",
|
||||
__dirname + "/test-rolling-file-stream-write-more",
|
||||
45
|
||||
);
|
||||
async.each(
|
||||
[0, 1, 2, 3, 4, 5, 6],
|
||||
function(i, cb) {
|
||||
stream.write(i +".cheese\n", "utf8", cb);
|
||||
},
|
||||
function() {
|
||||
stream.end();
|
||||
that.callback();
|
||||
}
|
||||
);
|
||||
|
||||
write7Cheese(that, stream);
|
||||
},
|
||||
'the number of files': {
|
||||
topic: function() {
|
||||
@ -183,16 +174,8 @@ vows.describe('RollingFileStream').addBatch({
|
||||
45,
|
||||
5
|
||||
);
|
||||
async.each(
|
||||
[0, 1, 2, 3, 4, 5, 6],
|
||||
function(i, cb) {
|
||||
stream.write(i +".cheese\n", "utf8", cb);
|
||||
},
|
||||
function() {
|
||||
stream.end();
|
||||
that.callback();
|
||||
}
|
||||
);
|
||||
|
||||
write7Cheese(that, stream);
|
||||
},
|
||||
'the files': {
|
||||
topic: function() {
|
||||
@ -206,5 +189,19 @@ vows.describe('RollingFileStream').addBatch({
|
||||
assert.include(files, 'test-rolling-stream-with-existing-files.20');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}).exportTo(module);

function write7Cheese(that, stream) {
  var streamed = 0;
  [0, 1, 2, 3, 4, 5, 6].forEach(function(i) {
    stream.write(i +".cheese\n", "utf8", function(e) {
      streamed++;
      if (e) { return that.callback(e); }
      if (streamed === 7) {
        stream.end();
        that.callback();
      }
    });
  });
}

86
test/subcategories-test.js
Normal file
@ -0,0 +1,86 @@
"use strict";
var assert = require('assert')
, vows = require('vows')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, levels = require('../lib/levels');

vows.describe('subcategories').addBatch({
  'loggers created after levels configuration is loaded': {
    topic: function() {

      log4js.configure({
        "levels": {
          "sub1": "WARN",
          "sub1.sub11": "TRACE",
          "sub1.sub11.sub111": "WARN",
          "sub1.sub12": "INFO"
        }
      }, { reloadSecs: 30 });

      return {
        "sub1": log4js.getLogger('sub1'), // WARN
        "sub11": log4js.getLogger('sub1.sub11'), // TRACE
        "sub111": log4js.getLogger('sub1.sub11.sub111'), // WARN
        "sub12": log4js.getLogger('sub1.sub12'), // INFO

        "sub13": log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
        "sub112": log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
        "sub121": log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
        "sub0": log4js.getLogger('sub0') // Not defined, not inherited: TRACE
      };
    },
    'check logger levels': function(loggers) {
      assert.equal(loggers.sub1.level, levels.WARN);
      assert.equal(loggers.sub11.level, levels.TRACE);
      assert.equal(loggers.sub111.level, levels.WARN);
      assert.equal(loggers.sub12.level, levels.INFO);

      assert.equal(loggers.sub13.level, levels.WARN);
      assert.equal(loggers.sub112.level, levels.TRACE);
      assert.equal(loggers.sub121.level, levels.INFO);
      assert.equal(loggers.sub0.level, levels.TRACE);
    }
  },
  'loggers created before levels configuration is loaded': {
    topic: function() {

      var loggers = {
        "sub1": log4js.getLogger('sub1'), // WARN
        "sub11": log4js.getLogger('sub1.sub11'), // TRACE
        "sub111": log4js.getLogger('sub1.sub11.sub111'), // WARN
        "sub12": log4js.getLogger('sub1.sub12'), // INFO

        "sub13": log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
        "sub112": log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
        "sub121": log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
        "sub0": log4js.getLogger('sub0') // Not defined, not inherited: TRACE
      };


      log4js.configure({
        "levels": {
          "sub1": "WARN",
          "sub1.sub11": "TRACE",
          "sub1.sub11.sub111": "WARN",
          "sub1.sub12": "INFO"
        }
      }, { reloadSecs: 30 });

      return loggers;

    },
    'check logger levels': function(loggers) {
      assert.equal(loggers.sub1.level, levels.WARN);
      assert.equal(loggers.sub11.level, levels.TRACE);
      assert.equal(loggers.sub111.level, levels.WARN);
      assert.equal(loggers.sub12.level, levels.INFO);

      assert.equal(loggers.sub13.level, levels.WARN);
      assert.equal(loggers.sub112.level, levels.TRACE);
      assert.equal(loggers.sub121.level, levels.INFO);
      assert.equal(loggers.sub0.level, levels.TRACE);
    }
  }
}).exportTo(module);