Merge branch 'master'

Conflicts:
	.travis.yml
	lib/appenders/dateFile.js
	lib/appenders/file.js
	lib/appenders/fileSync.js
	lib/appenders/gelf.js
	lib/appenders/logFacesAppender.js
	lib/date_format.js
	lib/debug.js
	lib/layouts.js
	lib/log4js.js
	lib/log4js.json
	lib/logger.js
	lib/streams/BaseRollingFileStream.js
	lib/streams/DateRollingFileStream.js
	lib/streams/RollingFileStream.js
	lib/streams/index.js
	package.json
	test/layouts-test.js
	test/streams/BaseRollingFileStream-test.js
	test/streams/DateRollingFileStream-test.js
	test/streams/rollingFileStream-test.js
This commit is contained in:
e-cloud 2016-12-12 17:42:34 +08:00
commit 02d6c017f4
74 changed files with 796 additions and 1376 deletions

View File

@ -1,18 +1,17 @@
# log4js-node [![Build Status](https://secure.travis-ci.org/nomiddlename/log4js-node.png?branch=master)](http://travis-ci.org/nomiddlename/log4js-node)
[![NPM](https://nodei.co/npm/log4js.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/log4js/)
This is a conversion of the [log4js](https://github.com/stritti/log4js)
framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript.
framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript.
Out of the box it supports the following features:
* coloured console logging to stdout or stderr
* replacement of node's console.log functions (optional)
* file appender, with log rolling based on file size
* file appender, with configurable log rolling based on file size or date
* SMTP appender
* GELF appender
* hook.io appender
* Loggly appender
* Logstash UDP appender
* logFaces appender
@ -21,6 +20,14 @@ Out of the box it supports the following features:
* configurable log message layout/patterns
* different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
## Important changes in 1.0
The default appender has been changed from `console` to `stdout` - this alleviates a memory problem that happens when logging using console. If you're using log4js in a browser (via browserify), then you'll probably need to explicitly configure log4js to use the console appender now (unless browserify handles process.stdout).
I'm also trying to move away from `vows` for the tests, and use `tape` instead. New tests should be added to `test/tape`, not the vows ones.
log4js also no longer supports node versions below 0.12.x.
NOTE: from log4js 0.5 onwards you'll need to explicitly enable replacement of node's console.log functions. Do this either by calling `log4js.replaceConsole()` or configuring with an object or json file like this:
```javascript
@ -51,7 +58,7 @@ By default, log4js outputs to stdout with the coloured layout (thanks to [masylu
```
See example.js for a full example, but here's a snippet (also in fromreadme.js):
```javascript
var log4js = require('log4js');
var log4js = require('log4js');
//console log is loaded by default, so you won't normally need to do this
//log4js.loadAppender('console');
log4js.loadAppender('file');
@ -87,10 +94,10 @@ log4js.configure({
## configuration
You can configure the appenders and log levels manually (as above), or provide a
configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. The
configuration file location may also be specified via the environment variable
LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`).
An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`.
configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. The
configuration file location may also be specified via the environment variable
LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`).
An example file can be found in `test/vows/log4js.json`. An example config file with log rolling is in `test/vows/with-log-rolling.json`.
You can configure log4js to check for configuration file changes at regular intervals, and if changed, reload. This allows changes to logging levels to occur without restarting the application.
To turn it on and specify a period:

View File

@ -1,3 +1,4 @@
"use strict";
var log4js = require('../lib/log4js');
//log the cheese logger messages to a file, and the console ones as well.
log4js.configure({
@ -55,6 +56,3 @@ anotherLogger.debug("Just checking");
//will also go to console, since that's configured for all categories
var pantsLog = log4js.getLogger('pants');
pantsLog.debug("Something for pants");

View File

@ -1,5 +1,5 @@
//remember to change the require to just 'log4js' if you've npm install'ed it
var log4js = require('./lib/log4js');
var log4js = require('../lib/log4js');
//by default the console appender is loaded
//log4js.loadAppender('console');
//you'd only need to add the console appender if you

36
examples/log-to-files.js Normal file
View File

@ -0,0 +1,36 @@
"use strict";
var path = require('path')
, log4js = require('../lib/log4js');
log4js.configure(
{
appenders: [
{
type: "file",
filename: "important-things.log",
maxLogSize: 10*1024*1024, // = 10Mb
numBackups: 5, // keep five backup files
compress: true, // compress the backups
encoding: 'utf-8',
mode: parseInt('0640', 8),
flags: 'w+'
},
{
type: "dateFile",
filename: "more-important-things.log",
pattern: "yyyy-MM-dd-hh",
compress: true
},
{
type: "stdout"
}
]
}
);
var logger = log4js.getLogger('things');
logger.debug("This little thing went to market");
logger.info("This little thing stayed at home");
logger.error("This little thing had roast beef");
logger.fatal("This little thing had none");
logger.trace("and this little thing went wee, wee, wee, all the way home.");

View File

@ -1,4 +1,4 @@
var log4js = require('./lib/log4js')
var log4js = require('../lib/log4js')
, logger
, usage
, i;

14
examples/reload.js Normal file
View File

@ -0,0 +1,14 @@
"use strict";
var path = require('path')
, log4js = require('../lib/log4js');
log4js.configure(
// config reloading only works with file-based config (obvs)
path.join(__dirname, '../test/tape/test-config.json'),
{ reloadSecs: 10 }
);
log4js.getLogger('testing').info("Just testing");
log4js.shutdown(function() {
//callback gets you notified when log4js has finished shutting down.
});

View File

@ -1,6 +1,6 @@
'use strict';
const streams = require('../streams');
const streams = require('streamroller');
const layouts = require('../layouts');
const path = require('path');
const os = require('os');
@ -25,15 +25,15 @@ process.on('exit', () => {
function appender(
filename,
pattern,
alwaysIncludePattern,
layout,
options,
timezoneOffset
) {
layout = layout || layouts.basicLayout;
const logFile = new streams.DateRollingFileStream(
filename,
pattern,
{ alwaysIncludePattern }
options
);
openFiles.push(logFile);
@ -60,8 +60,8 @@ function configure(config, options) {
return appender(
config.filename,
config.pattern,
config.alwaysIncludePattern,
layout,
config,
config.timezoneOffset
);
}

View File

@ -1,20 +1,32 @@
'use strict';
const debug = require('debug')('log4js:file');
const layouts = require('../layouts');
const path = require('path');
const fs = require('fs');
const streams = require('../streams');
const streams = require('streamroller');
const os = require('os');
const eol = os.EOL || '\n';
const openFiles = [];
// close open files on process exit.
process.on('exit', () => {
debug('Exit handler called.');
openFiles.forEach(file => {
file.end();
});
});
// On SIGHUP, close and reopen all files. This allows this appender to work with
// logrotate. Note that if you are using logrotate, you should not set
// `logSize`.
process.on('SIGHUP', function() {
debug('SIGHUP handler called.');
openFiles.forEach(function(writer) {
writer.closeTheStream(writer.openTheStream.bind(writer));
});
});
/**
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
@ -25,51 +37,48 @@ process.on('exit', () => {
* if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize
* has been reached (default 5)
* @param compress - flag that controls log file compression
* @param options - options to be passed to the underlying stream
* @param timezoneOffset - optional timezone offset in minutes (default system local)
*/
function fileAppender(file, layout, logSize, numBackups, compress, timezoneOffset) {
function fileAppender (file, layout, logSize, numBackups, options, timezoneOffset) {
file = path.normalize(file);
layout = layout || layouts.basicLayout;
numBackups = numBackups === undefined ? 5 : numBackups;
// there has to be at least one backup if logSize has been specified
numBackups = numBackups === 0 ? 1 : numBackups;
function openTheStream(filePath, fileSize, numFiles) {
let stream;
if (fileSize) {
stream = new streams.RollingFileStream(
filePath,
fileSize,
numFiles,
{ compress: compress }
);
} else {
stream = fs.createWriteStream(
filePath,
{
encoding: 'utf8',
mode: parseInt('0644', 8),
flags: 'a'
}
);
}
stream.on('error', err => {
console.error('log4js.fileAppender - Writing to file %s, error happened ', filePath, err);
});
return stream;
}
const logFile = openTheStream(file, logSize, numBackups);
debug("Creating file appender (",
file, ", ",
logSize, ", ",
numBackups, ", ",
options, ", ",
timezoneOffset, ")"
);
var writer = openTheStream(file, logSize, numBackups, options);
// push file to the stack of open handlers
openFiles.push(logFile);
openFiles.push(writer);
return loggingEvent => {
logFile.write(layout(loggingEvent, timezoneOffset) + eol, 'utf8');
return function(loggingEvent) {
writer.write(layout(loggingEvent, timezoneOffset) + eol, "utf8");
};
}
function openTheStream(file, fileSize, numFiles, options) {
var stream = new streams.RollingFileStream(
file,
fileSize,
numFiles,
options
);
stream.on("error", function (err) {
console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
});
return stream;
}
function configure(config, options) {
let layout;
if (config.layout) {
@ -85,7 +94,7 @@ function configure(config, options) {
layout,
config.maxLogSize,
config.backups,
config.compress,
config,
config.timezoneOffset
);
}

View File

@ -1,6 +1,6 @@
'use strict';
const debug = require('../debug')('fileSync');
const debug = require('debug')('log4js:fileSync');
const layouts = require('../layouts');
const path = require('path');
const fs = require('fs');

View File

@ -3,8 +3,9 @@ const zlib = require('zlib');
const layouts = require('../layouts');
const levels = require('../levels');
const dgram = require('dgram');
const util = require('util');
const OS = require('os');
const debug = require('../debug')('GELF Appender');
const debug = require('debug')('log4js:gelf');
/* eslint no-unused-vars:0 */
const LOG_EMERG = 0; // system is unusable(unused)

View File

@ -1,66 +1,126 @@
/**
* logFaces appender sends JSON formatted log events to logFaces server UDP receivers.
* Events contain the following properties:
* - application name (taken from configuration)
* - host name (taken from underlying os)
* - time stamp
* - level
* - logger name (e.g. category)
* - thread name (current process id)
* - message text
*/
* logFaces appender sends JSON formatted log events to logFaces receivers.
* There are two types of receivers supported - raw UDP sockets (for server side apps),
* and HTTP (for client side apps). Depending on the usage, this appender
* requires either of the two:
*
* For UDP require 'dgram', see 'https://nodejs.org/api/dgram.html'
* For HTTP require 'axios', see 'https://www.npmjs.com/package/axios'
*
* Make sure your project has the relevant dependency installed before using this appender.
*/
'use strict';
const dgram = require('dgram');
const layouts = require('../layouts');
const os = require('os');
const util = require('util');
const context = {};
function logFacesAppender(config, layout) {
const lfsSock = dgram.createSocket('udp4');
let localhost = '';
function datagram(config){
const sock = require('dgram').createSocket('udp4');
const host = config.remoteHost || '127.0.0.1';
const port = config.port || 55201;
if (os && os.hostname()) localhost = os.hostname().toString();
return function(event){
const buff = new Buffer(JSON.stringify(event));
sock.send(buff, 0, buff.length, port, host, function(err, bytes) {
if(err){
console.error('log4js.logFacesAppender failed to %s:%d, error: %s',
host, port, err);
}
});
};
}
let pid;
function servlet(config){
const axios = require('axios').create();
axios.defaults.baseURL = config.url;
axios.defaults.timeout = config.timeout || 5000;
axios.defaults.headers = {'Content-Type': 'application/json'};
axios.defaults.withCredentials = true;
if (process && process.pid) pid = process.pid;
return function(lfsEvent){
axios.post("", lfsEvent)
.then(function(response){
if(response.status != 200){
console.error('log4js.logFacesAppender post to %s failed: %d',
config.url, response.status);
}
})
.catch(function(response){
console.error('log4js.logFacesAppender post to %s excepted: %s',
config.url, response.status);
});
};
}
return function log(loggingEvent) {
const lfsEvent = {
a: config.application || '', // application name
h: localhost, // this host name
t: loggingEvent.startTime.getTime(), // time stamp
p: loggingEvent.level.levelStr, // level (priority)
g: loggingEvent.categoryName, // logger name
r: pid, // thread (process id)
m: layout(loggingEvent) // message text
};
/**
* For UDP (node.js) use the following configuration params:
* {
* "type": "logFacesAppender", // must be present for instantiation
* "application": "LFS-TEST", // name of the application (domain)
* "remoteHost": "127.0.0.1", // logFaces server address (hostname)
* "port": 55201 // UDP receiver listening port
* }
*
* For HTTP (browsers or node.js) use the following configuration params:
* {
* "type": "logFacesAppender", // must be present for instantiation
* "application": "LFS-TEST", // name of the application (domain)
* "url": "http://lfs-server/logs", // logFaces receiver servlet URL
* }
*/
function logFacesAppender(config) {
let send = config.send;
if(send === undefined){
send = (config.url === undefined) ? datagram(config) : servlet(config);
}
const buffer = new Buffer(JSON.stringify(lfsEvent));
const lfsHost = config.remoteHost || '127.0.0.1';
const lfsPort = config.port || 55201;
return function log(event) {
// convert to logFaces compact json format
const lfsEvent = {
a: config.application || '', // application name
t: event.startTime.getTime(), // time stamp
p: event.level.levelStr, // level (priority)
g: event.categoryName, // logger name
m: format(event.data) // message text
};
/* eslint no-unused-vars:0 */
lfsSock.send(buffer, 0, buffer.length, lfsPort, lfsHost, (err, bytes) => {
if (err) {
console.error('log4js.logFacesAppender send to %s:%d failed, error: %s',
config.host, config.port, util.inspect(err));
}
});
};
// add context variables if exist
Object.keys(context).forEach(function(key) {
lfsEvent[`p_${key}`] = context[key];
});
// send to server
send(lfsEvent);
};
}
function configure(config) {
let layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
} else {
layout = layouts.layout('pattern', { type: 'pattern', pattern: '%m' });
}
return logFacesAppender(config);
}
return logFacesAppender(config, layout);
function setContext(key, value){
context[key] = value;
}
function format(logData) {
const data = Array.isArray(logData) ?
logData : Array.prototype.slice.call(arguments);
return util.format(...wrapErrorsWithInspect(data));
}
function wrapErrorsWithInspect(items) {
return items.map(function(item) {
if ((item instanceof Error) && item.stack) {
return { inspect: function() {
return `${util.format(item)}\n${item.stack}`;
}};
} else {
return item;
}
});
}
module.exports.appender = logFacesAppender;
module.exports.configure = configure;
module.exports.setContext = setContext;

View File

@ -36,6 +36,7 @@ function logstashUDP(config, layout) {
}
}
config.fields.level = loggingEvent.level.levelStr;
config.fields.category = loggingEvent.categoryName;
const logObject = {
'@version': '1',

21
lib/appenders/stdout.js Normal file
View File

@ -0,0 +1,21 @@
"use strict";
var layouts = require('../layouts');
function stdoutAppender(layout, timezoneOffset) {
layout = layout || layouts.colouredLayout;
return function(loggingEvent) {
process.stdout.write(layout(loggingEvent, timezoneOffset) + '\n');
};
}
function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return stdoutAppender(layout, config.timezoneOffset);
}
exports.appender = stdoutAppender;
exports.configure = configure;

View File

@ -1,7 +1,7 @@
'use strict';
module.exports.ISO8601_FORMAT = 'yyyy-MM-dd hh:mm:ss.SSS';
module.exports.ISO8601_WITH_TZ_OFFSET_FORMAT = 'yyyy-MM-ddThh:mm:ssO';
module.exports.ISO8601_WITH_TZ_OFFSET_FORMAT = 'yyyy-MM-ddThh:mm:ss.SSSO';
module.exports.DATETIME_FORMAT = 'dd MM yyyy hh:mm:ss.SSS';
module.exports.ABSOLUTETIME_FORMAT = 'hh:mm:ss.SSS';

View File

@ -1,15 +0,0 @@
'use strict';
module.exports = function (label) {
let debug;
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function (message) {
console.error('LOG4JS: (%s) %s', label, message);
};
} else {
debug = function () { };
}
return debug;
};

View File

@ -46,7 +46,14 @@ function wrapErrorsWithInspect(items) {
/* eslint prefer-rest-params:0 */
// todo: once node v4 support dropped, use rest parameter instead
function formatLogData(logData) {
const data = Array.isArray(logData) ? logData : Array.from(arguments);
let data = logData;
if (!Array.isArray(data)) {
const numArgs = arguments.length;
data = new Array(numArgs);
for (let i = 0; i < numArgs; i++) {
data[i] = arguments[i];
}
}
return util.format.apply(util, wrapErrorsWithInspect(data));
}

View File

@ -47,7 +47,7 @@ const appenderMakers = {};
const appenderShutdowns = {};
const defaultConfig = {
appenders: [
{ type: 'console' }
{ type: 'stdout' }
],
replaceConsole: false
};
@ -278,6 +278,7 @@ function loadConfigurationFile(filename) {
function configureOnceOff(config, options) {
if (config) {
try {
restoreConsole();
configureLevels(config.levels);
configureAppenders(config.appenders, options);
@ -429,6 +430,11 @@ function shutdown(cb) {
// not being able to be drained because of run-away log writes.
loggerModule.disableAllLogWrites();
//turn off config reloading
if (configState.timerId) {
clearInterval(configState.timerId);
}
// Call each of the shutdown functions in parallel
let completed = 0;
let error;

View File

@ -1,7 +0,0 @@
{
"appenders": [
{
"type": "console"
}
]
}

View File

@ -84,9 +84,7 @@ class Logger extends EventEmitter {
Logger.DEFAULT_CATEGORY = DEFAULT_CATEGORY;
Logger.prototype.level = levels.TRACE;
['Trace', 'Debug', 'Info', 'Warn', 'Error', 'Fatal', 'Mark'].forEach(
levelString => { addLevelMethods(levelString); }
);
['Trace','Debug','Info','Warn','Error','Fatal', 'Mark'].forEach(addLevelMethods);
function addLevelMethods(target) {
const level = levels.toLevel(target);

View File

@ -1,92 +0,0 @@
'use strict';
const fs = require('fs');
const debug = require('../debug')('BaseRollingFileStream');
const stream = require('stream');
// Base class for the rolling file streams. A Writable stream that appends
// chunks to `filename`, tracks bytes written so far, and before each write
// asks the subclass (via shouldRoll/roll) whether the file must be rolled.
class BaseRollingFileStream extends stream.Writable {
// filename: path of the log file (required).
// options: passed to fs.createWriteStream; encoding/mode/flags get defaults.
constructor(filename, options) {
debug('In BaseRollingFileStream');
function throwErrorIfArgumentsAreNotValid() {
if (!filename) {
throw new Error('You must specify a filename');
}
}
throwErrorIfArgumentsAreNotValid();
debug('Calling BaseRollingFileStream.super');
super();
this.filename = filename;
this.options = options || {};
// sensible defaults for an append-only log file
this.options.encoding = this.options.encoding || 'utf8';
this.options.mode = this.options.mode || parseInt('0644', 8);
this.options.flags = this.options.flags || 'a';
this.currentSize = 0;
// open the stream first, then seed currentSize from what is already on
// disk so size-based rolling accounts for pre-existing file content
this.openTheStream();
this.currentSize = currentFileSize(this.filename);
}
// Writable hook: roll first if the subclass says so, then forward the
// chunk to the underlying fs stream, keeping the running byte count.
_write(chunk, encoding, callback) {
const that = this;
function writeTheChunk() {
debug('writing the chunk to the underlying stream');
that.currentSize += chunk.length;
try {
that.theStream.write(chunk, encoding, callback);
} catch (err) {
// swallow write errors so one bad write does not kill the logger;
// callback is still invoked to keep the Writable machinery moving
debug(err);
callback();
}
}
debug('in _write');
if (this.shouldRoll()) {
this.currentSize = 0;
this.roll(this.filename, writeTheChunk);
} else {
writeTheChunk();
}
}
// (Re)open the underlying fs write stream; cb fires on its 'open' event.
openTheStream(cb) {
debug('opening the underlying stream');
this.theStream = fs.createWriteStream(this.filename, this.options);
if (cb) {
this.theStream.on('open', cb);
}
}
// Flush and close the underlying fs stream, then invoke cb.
closeTheStream(cb) {
debug('closing the underlying stream');
this.theStream.end(cb);
}
// Subclass hook: return true when the file should be rolled before writing.
shouldRoll() {
return false; // default behaviour is never to roll
}
// Subclass hook: perform the roll, then invoke callback to resume writing.
roll(filename, callback) {
callback(); // default behaviour is not to do anything
}
}
// Size in bytes of `file`, or 0 when it does not exist (or cannot be stat'ed).
function currentFileSize(file) {
let fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
}
return fileSize;
}
module.exports = BaseRollingFileStream;

View File

@ -1,95 +0,0 @@
'use strict';
const BaseRollingFileStream = require('./BaseRollingFileStream');
const debug = require('../debug')('DateRollingFileStream');
const format = require('../date_format');
const fs = require('fs');

// Use the existing file's mtime as the "last write" timestamp so a restart
// does not reset the rolling schedule; otherwise start the clock at now().
function findTimestampFromFileIfExists(filename, now) {
  return fs.existsSync(filename) ? fs.statSync(filename).mtime : new Date(now());
}

/**
 * Rolling file stream that starts a new log file whenever the date,
 * formatted with `pattern`, changes between writes.
 *
 * @param filename base name of the log file
 * @param pattern date_format pattern (default '.yyyy-MM-dd'); optional —
 *        when omitted the second argument is treated as `options`
 * @param options stream options; `alwaysIncludePattern` makes the stream
 *        write directly to `filename + pattern` instead of renaming on roll
 * @param now clock function returning ms since epoch (injectable for tests)
 */
class DateRollingFileStream extends BaseRollingFileStream {
  constructor(filename, pattern, options, now) {
    debug(`Now is ${now}`);
    // support the (filename, options, now) call form — pattern is optional
    if (pattern && typeof(pattern) === 'object') {
      now = options;
      options = pattern;
      pattern = null;
    }
    pattern = pattern || '.yyyy-MM-dd';
    const thisNow = now || Date.now;
    const lastTimeWeWroteSomething = format.asString(
      pattern,
      findTimestampFromFileIfExists(filename, thisNow)
    );
    const baseFilename = filename;
    let alwaysIncludePattern = false;
    if (options) {
      if (options.alwaysIncludePattern) {
        alwaysIncludePattern = true;
        filename = baseFilename + lastTimeWeWroteSomething;
      }
      // don't leak our own option into the underlying write-stream options
      delete options.alwaysIncludePattern;
      if (Object.keys(options).length === 0) {
        options = null;
      }
    }
    debug(`this.now is ${thisNow}, now is ${now}`);
    super(filename, options);
    this.pattern = pattern;
    this.now = thisNow;
    this.lastTimeWeWroteSomething = lastTimeWeWroteSomething;
    this.baseFilename = baseFilename;
    this.alwaysIncludePattern = alwaysIncludePattern;
  }

  // Roll when the formatted date of "now" differs from that of the last
  // write. Also records the previous period so roll() knows the rename target.
  shouldRoll() {
    const lastTime = this.lastTimeWeWroteSomething;
    const thisTime = format.asString(this.pattern, new Date(this.now()));
    debug(`DateRollingFileStream.shouldRoll with now = ${this.now()}, thisTime = ${thisTime}, lastTime = ${lastTime}`);
    this.lastTimeWeWroteSomething = thisTime;
    this.previousTime = lastTime;
    return thisTime !== lastTime;
  }

  roll(filename, callback) {
    debug('Starting roll');
    let newFilename;
    if (this.alwaysIncludePattern) {
      // the pattern is part of the live filename: just switch to the new one
      this.filename = this.baseFilename + this.lastTimeWeWroteSomething;
      this.closeTheStream(this.openTheStream.bind(this, callback));
    } else {
      // archive the current file under the previous period's name, then
      // reopen the base filename for the new period
      newFilename = this.baseFilename + this.previousTime;
      this.closeTheStream(
        deleteAnyExistingFile.bind(null,
          renameTheCurrentFile.bind(null,
            this.openTheStream.bind(this, callback))));
    }

    function deleteAnyExistingFile(cb) {
      // on windows, you can get a EEXIST error if you rename a file to an existing file
      // so, we'll try to delete the file we're renaming to first
      fs.unlink(newFilename, err => {
        // ignore err: if we could not delete, it's most likely that it doesn't exist
        cb(err);
      });
    }

    function renameTheCurrentFile(cb) {
      // fix: interpolate the source filename (the message was garbled)
      debug(`Renaming the ${filename} -> ${newFilename}`);
      fs.rename(filename, newFilename, cb);
    }
  }
}

module.exports = DateRollingFileStream;

View File

@ -1,117 +0,0 @@
'use strict';
const BaseRollingFileStream = require('./BaseRollingFileStream');
const debug = require('../debug')('RollingFileStream');
const path = require('path');
const zlib = require('zlib');
const fs = require('fs');

/**
 * Rolling file stream that rolls on file size: when the current file reaches
 * `size` bytes it is renamed to filename.1 (existing backups shift to .2,
 * .3, … up to `backups`) and a fresh file is opened.
 *
 * @param filename log file path (required)
 * @param size maximum size in bytes before rolling (required, > 0)
 * @param backups number of rolled files to keep (default 1)
 * @param options passed to the base stream; `compress` gzips rolled backups
 */
class RollingFileStream extends BaseRollingFileStream {
  constructor(filename, size, backups, options) {
    function throwErrorIfArgumentsAreNotValid() {
      if (!filename || !size || size <= 0) {
        throw new Error('You must specify a filename and file size');
      }
    }
    throwErrorIfArgumentsAreNotValid();
    super(filename, options);
    this.size = size;
    this.backups = backups || 1;
  }

  shouldRoll() {
    debug(`should roll with current size ${this.currentSize} and max size ${this.size}`);
    return this.currentSize >= this.size;
  }

  roll(filename, callback) {
    const that = this;
    // matches the base log file and its numbered/compressed backups
    const nameMatcher = new RegExp(`^${path.basename(filename)}`);

    function justTheseFiles(item) {
      return nameMatcher.test(item);
    }

    // backup index of a file ("file.log.2" -> 2, "file.log" -> 0)
    function index(filename_) {
      debug(`Calculating index of ${filename_}`);
      return parseInt(filename_.substring((`${path.basename(filename)}.`).length), 10) || 0;
    }

    function byIndex(a, b) {
      if (index(a) > index(b)) {
        return 1;
      } else if (index(a) < index(b)) {
        return -1;
      }
      return 0;
    }

    // renames file.n to file.n+1 (dropping files beyond `backups`),
    // compressing the result when options.compress is set
    function increaseFileIndex(fileToRename, cb) {
      const idx = index(fileToRename);
      debug(`Index of ${fileToRename} is ${idx}`);
      if (idx < that.backups) {
        const ext = path.extname(fileToRename);
        // fix: interpolate the destination path (was a garbled placeholder)
        let destination = `${filename}.${idx + 1}`;
        if (that.options.compress && /^gz$/.test(ext.substring(1))) {
          destination += ext;
        }
        // on windows, you can get a EEXIST error if you rename a file to an existing file
        // so, we'll try to delete the file we're renaming to first
        /* eslint no-unused-vars:0 */
        fs.unlink(destination, err => {
          // ignore err: if we could not delete, it's most likely that it doesn't exist
          debug(`Renaming ${fileToRename} -> ${destination}`);
          fs.rename(path.join(path.dirname(filename), fileToRename), destination, _err => {
            if (_err) {
              cb(_err);
            } else {
              if (that.options.compress && ext !== '.gz') {
                compress(destination, cb);
              } else {
                cb();
              }
            }
          });
        });
      } else {
        cb();
      }
    }

    // shift all matching backups up one index, highest-numbered first
    function renameTheFiles(cb) {
      // roll the backups (rename file.n to file.n+1, where n <= numBackups)
      debug('Renaming the old files');
      fs.readdir(path.dirname(filename), (err, files) => {
        const filesToProcess = files.filter(justTheseFiles).sort(byIndex);
        (function processOne(_err) {
          const file = filesToProcess.pop();
          if (!file || _err) {
            return cb(_err);
          }
          return increaseFileIndex(file, processOne);
        }());
      });
    }

    debug('Rolling, rolling, rolling');
    this.closeTheStream(
      renameTheFiles.bind(null,
        this.openTheStream.bind(this, callback)));
  }
}

// gzip `filename` into `filename`.gz and remove the original.
// NOTE(review): unlink is issued before the pipe finishes; this relies on
// POSIX open-file semantics — confirm behaviour on other platforms.
function compress(filename, cb) {
  const gzip = zlib.createGzip();
  const inp = fs.createReadStream(filename);
  const out = fs.createWriteStream(`${filename}.gz`);
  inp.pipe(gzip).pipe(out);
  fs.unlink(filename, cb);
}

module.exports = RollingFileStream;

View File

@ -1,4 +0,0 @@
'use strict';
module.exports.RollingFileStream = require('./RollingFileStream');
module.exports.DateRollingFileStream = require('./DateRollingFileStream');

View File

@ -1,6 +1,6 @@
{
"name": "log4js",
"version": "0.6.38",
"version": "1.0.1",
"description": "Port of Log4js to work with node.",
"keywords": [
"logging",
@ -22,17 +22,19 @@
"node": ">=4.0"
},
"scripts": {
"pretest": "eslint lib/**/*",
"test": "vows",
"clean": "find test -type f ! -name '*.json' ! -name '*.js' -delete && rm *.log",
"posttest": "npm run clean",
"clean": "find test -type f ! -name '*.json' ! -name '*.js' -delete && rm *.log"
"pretest": "eslint lib/**/*",
"test": "tape 'test/tape/**/*.js' && vows test/vows/*.js"
},
"directories": {
"test": "test",
"lib": "lib"
},
"dependencies": {
"semver": "^5.3.0"
"debug": "^2.2.0",
"semver": "^5.3.0",
"streamroller": "^0.2.1"
},
"devDependencies": {
"conventional-changelog": "^1.1.0",
@ -41,6 +43,7 @@
"eslint-plugin-import": "^1.5.0",
"ghooks": "^1.2.1",
"sandboxed-module": "0.3.0",
"tape": "^4.6.2",
"validate-commit-msg": "^2.6.1",
"vows": "0.7.0"
},

View File

@ -1,72 +0,0 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, sandbox = require('sandboxed-module')
, fakeConsole = {
error: function(format, label, message) {
this.logged = [ format, label, message ];
}
}
, globals = function(debugValue) {
return {
process: {
env: {
'NODE_DEBUG': debugValue
}
},
console: fakeConsole
};
};
vows.describe('../lib/debug').addBatch({
'when NODE_DEBUG is set to log4js': {
topic: function() {
var debug = sandbox.require(
'../lib/debug',
{ 'globals': globals('log4js') }
);
fakeConsole.logged = [];
debug('cheese')('biscuits');
return fakeConsole.logged;
},
'it should log to console.error': function(logged) {
assert.equal(logged[0], 'LOG4JS: (%s) %s');
assert.equal(logged[1], 'cheese');
assert.equal(logged[2], 'biscuits');
}
},
'when NODE_DEBUG is set to not log4js': {
topic: function() {
var debug = sandbox.require(
'../lib/debug',
{ globals: globals('other_module') }
);
fakeConsole.logged = [];
debug('cheese')('biscuits');
return fakeConsole.logged;
},
'it should not log to console.error': function(logged) {
assert.equal(logged.length, 0);
}
},
'when NODE_DEBUG is not set': {
topic: function() {
var debug = sandbox.require(
'../lib/debug',
{ globals: globals(null) }
);
fakeConsole.logged = [];
debug('cheese')('biscuits');
return fakeConsole.logged;
},
'it should not log to console.error': function(logged) {
assert.equal(logged.length, 0);
}
}
}).exportTo(module);

View File

@ -1,96 +0,0 @@
"use strict";
var vows = require('vows'),
assert = require('assert'),
log4js = require('../lib/log4js'),
sandbox = require('sandboxed-module');
function setupLogging(category, options) {
var udpSent = {};
var fakeDgram = {
createSocket: function (type) {
return {
send: function(buffer, offset, length, port, host, callback) {
udpSent.date = new Date();
udpSent.host = host;
udpSent.port = port;
udpSent.length = length;
udpSent.offset = 0;
udpSent.buffer = buffer;
callback(undefined, length);
}
};
}
};
var lfsModule = sandbox.require('../lib/appenders/logFacesAppender', {
requires: {
'dgram': fakeDgram
}
});
log4js.clearAppenders();
log4js.addAppender(lfsModule.configure(options), category);
return {
logger: log4js.getLogger(category),
results: udpSent
};
}
vows.describe('logFaces UDP appender').addBatch({
'when logging to logFaces UDP receiver': {
topic: function() {
var setup = setupLogging('myCategory', {
"type": "logFacesAppender",
"application": "LFS-TEST",
"remoteHost": "127.0.0.1",
"port": 55201,
"layout": {
"type": "pattern",
"pattern": "%m"
}
});
setup.logger.warn('Log event #1');
return setup;
},
'an UDP packet should be sent': function (topic) {
assert.equal(topic.results.host, "127.0.0.1");
assert.equal(topic.results.port, 55201);
assert.equal(topic.results.offset, 0);
var json = JSON.parse(topic.results.buffer.toString());
assert.equal(json.a, 'LFS-TEST');
assert.equal(json.m, 'Log event #1');
assert.equal(json.g, 'myCategory');
assert.equal(json.p, 'WARN');
// Assert timestamp, up to hours resolution.
var date = new Date(json.t);
assert.equal(
date.toISOString().substring(0, 14),
topic.results.date.toISOString().substring(0, 14)
);
}
},
'when missing options': {
topic: function() {
var setup = setupLogging('myLogger', {
"type": "logFacesAppender",
});
setup.logger.error('Log event #2');
return setup;
},
'it sets some defaults': function (topic) {
assert.equal(topic.results.host, "127.0.0.1");
assert.equal(topic.results.port, 55201);
var json = JSON.parse(topic.results.buffer.toString());
assert.equal(json.a, "");
assert.equal(json.m, 'Log event #2');
assert.equal(json.g, 'myLogger');
assert.equal(json.p, 'ERROR');
}
}
}).export(module);

View File

@ -1,35 +0,0 @@
"use strict";
var assert = require('assert')
, vows = require('vows')
, layouts = require('../lib/layouts')
, sandbox = require('sandboxed-module');
vows.describe('../lib/appenders/stderr').addBatch({
'appender': {
topic: function() {
var messages = []
, fakeProcess = {
stderr: {
write: function(msg) { messages.push(msg); }
}
}
, appenderModule = sandbox.require(
'../lib/appenders/stderr',
{
globals: {
'process': fakeProcess
}
}
)
, appender = appenderModule.appender(layouts.messagePassThroughLayout);
appender({ data: ["blah"] });
return messages;
},
'should output to stderr': function(messages) {
assert.equal(messages[0], 'blah\n');
}
}
}).exportTo(module);

View File

@ -1,68 +0,0 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, fs = require('fs')
, sandbox = require('sandboxed-module');
vows.describe('../../lib/streams/BaseRollingFileStream').addBatch({
'when node version > 0.10.0': {
topic: function() {
var streamLib = sandbox.load(
'../../lib/streams/BaseRollingFileStream',
{
globals: {
process: {
version: '0.10.1'
}
},
requires: {
'stream': {
Writable: function() {}
}
}
}
);
return streamLib.required;
},
'it should use the core stream module': function(required) {
assert.ok(required.stream);
assert.ok(!required['readable-stream']);
}
},
'when no filename is passed': {
topic: require('../../lib/streams/BaseRollingFileStream'),
'it should throw an error': function(BaseRollingFileStream) {
try {
new BaseRollingFileStream();
assert.fail('should not get here');
} catch (e) {
assert.ok(e);
}
}
},
'default behaviour': {
topic: function() {
var BaseRollingFileStream = require('../../lib/streams/BaseRollingFileStream')
, stream = new BaseRollingFileStream('basetest.log');
return stream;
},
teardown: function() {
try {
fs.unlink('basetest.log');
} catch (e) {
console.error("could not remove basetest.log", e);
}
},
'it should not want to roll': function(stream) {
assert.isFalse(stream.shouldRoll());
},
'it should not roll': function(stream) {
var cbCalled = false;
//just calls the callback straight away, no async calls
stream.roll('basetest.log', function() { cbCalled = true; });
assert.isTrue(cbCalled);
}
}
}).exportTo(module);

View File

@ -1,221 +0,0 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, fs = require('fs')
, streams = require('stream')
, DateRollingFileStream
, testTime = new Date(2012, 8, 12, 10, 37, 11);
DateRollingFileStream = require('../../lib/streams').DateRollingFileStream;
function cleanUp(filename) {
return function() {
fs.unlink(filename);
};
}
function now() {
return testTime.getTime();
}
vows.describe('DateRollingFileStream').addBatch({
'arguments': {
topic: new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-1',
'yyyy-mm-dd.hh'
),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),
'should take a filename and a pattern and return a WritableStream': function(stream) {
assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
assert.instanceOf(stream, streams.Writable);
},
'with default settings for the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 420);
assert.equal(stream.theStream.flags, 'a');
//encoding is not available on the underlying stream
//assert.equal(stream.encoding, 'utf8');
}
},
'default arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-2'),
'pattern should be .yyyy-MM-dd': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd');
}
},
'with stream arguments': {
topic: new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-3',
'yyyy-MM-dd',
{ mode: parseInt('0666', 8) }
),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, parseInt('0666', 8));
}
},
'with stream arguments but no pattern': {
topic: new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-4',
{ mode: parseInt('0666', 8) }
),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, parseInt('0666', 8));
},
'should use default pattern': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd');
}
},
'with a pattern of .yyyy-MM-dd': {
topic: function() {
var that = this,
stream = new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd',
null,
now
);
stream.write("First message\n", 'utf8', function() {
that.callback(null, stream);
});
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'),
'should create a file with the base name': {
topic: function(stream) {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
},
'file should contain first message': function(result) {
assert.equal(result.toString(), "First message\n");
}
},
'when the day changes': {
topic: function(stream) {
testTime = new Date(2012, 8, 13, 0, 10, 12);
stream.write("Second message\n", 'utf8', this.callback);
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(
files.filter(
function(file) {
return file.indexOf('test-date-rolling-file-stream-5') > -1;
}
).length,
2
);
}
},
'the file without a date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
},
'should contain the second message': function(contents) {
assert.equal(contents.toString(), "Second message\n");
}
},
'the file with the date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', this.callback);
},
'should contain the first message': function(contents) {
assert.equal(contents.toString(), "First message\n");
}
}
}
},
'with alwaysIncludePattern': {
topic: function() {
var that = this,
testTime = new Date(2012, 8, 12, 0, 10, 12),
stream = new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-pattern',
'.yyyy-MM-dd',
{alwaysIncludePattern: true},
now
);
stream.write("First message\n", 'utf8', function() {
that.callback(null, stream);
});
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12'),
'should create a file with the pattern set': {
topic: function(stream) {
fs.readFile(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', this.callback);
},
'file should contain first message': function(result) {
assert.equal(result.toString(), "First message\n");
}
},
'when the day changes': {
topic: function(stream) {
testTime = new Date(2012, 8, 13, 0, 10, 12);
stream.write("Second message\n", 'utf8', this.callback);
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13'),
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(
files.filter(
function(file) {
return file.indexOf('test-date-rolling-file-stream-pattern') > -1;
}
).length,
2
);
}
},
'the file with the later date': {
topic: function() {
fs.readFile(
__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13',
this.callback
);
},
'should contain the second message': function(contents) {
assert.equal(contents.toString(), "Second message\n");
}
},
'the file with the date': {
topic: function() {
fs.readFile(
__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12',
this.callback
);
},
'should contain the first message': function(contents) {
assert.equal(contents.toString(), "First message\n");
}
}
}
}
}).exportTo(module);

View File

@ -1,201 +0,0 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, events = require('events')
, fs = require('fs')
, streams = require('stream')
, RollingFileStream;
RollingFileStream = require('../../lib/streams').RollingFileStream;
function remove(filename) {
try {
fs.unlinkSync(filename);
} catch (e) {
//doesn't really matter if it failed
}
}
function create(filename) {
fs.writeFileSync(filename, "test file");
}
vows.describe('RollingFileStream').addBatch({
'arguments': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream");
return new RollingFileStream("test-rolling-file-stream", 1024, 5);
},
'should take a filename, file size (bytes), no. backups, return Writable': function(stream) {
assert.instanceOf(stream, streams.Writable);
assert.equal(stream.filename, "test-rolling-file-stream");
assert.equal(stream.size, 1024);
assert.equal(stream.backups, 5);
},
'with default settings for the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 420);
assert.equal(stream.theStream.flags, 'a');
//encoding isn't a property on the underlying stream
//assert.equal(stream.theStream.encoding, 'utf8');
}
},
'with stream arguments': {
topic: function() {
remove(__dirname + '/test-rolling-file-stream');
return new RollingFileStream(
'test-rolling-file-stream',
1024,
5,
{ mode: parseInt('0666', 8) }
);
},
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, parseInt('0666', 8));
}
},
'without size': {
topic: function() {
try {
new RollingFileStream(__dirname + "/test-rolling-file-stream");
} catch (e) {
return e;
}
},
'should throw an error': function(err) {
assert.instanceOf(err, Error);
}
},
'without number of backups': {
topic: function() {
remove('test-rolling-file-stream');
return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024);
},
'should default to 1 backup': function(stream) {
assert.equal(stream.backups, 1);
}
},
'writing less than the file size': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream-write-less");
var that = this
, stream = new RollingFileStream(
__dirname + "/test-rolling-file-stream-write-less",
100
);
stream.write("cheese", "utf8", function() {
stream.end();
fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback);
});
},
'should write to the file': function(contents) {
assert.equal(contents, "cheese");
},
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be one': function(files) {
assert.equal(
files.filter(
function(file) {
return file.indexOf('test-rolling-file-stream-write-less') > -1;
}
).length,
1
);
}
}
},
'writing more than the file size': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream-write-more");
remove(__dirname + "/test-rolling-file-stream-write-more.1");
var that = this
, stream = new RollingFileStream(
__dirname + "/test-rolling-file-stream-write-more",
45
);
write7Cheese(that, stream);
},
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(files.filter(
function(file) {
return file.indexOf('test-rolling-file-stream-write-more') > -1;
}
).length, 2);
}
},
'the first file': {
topic: function() {
fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback);
},
'should contain the last two log messages': function(contents) {
assert.equal(contents, '5.cheese\n6.cheese\n');
}
},
'the second file': {
topic: function() {
fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback);
},
'should contain the first five log messages': function(contents) {
assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
}
}
},
'when many files already exist': {
topic: function() {
remove(__dirname + '/test-rolling-stream-with-existing-files.11');
remove(__dirname + '/test-rolling-stream-with-existing-files.20');
remove(__dirname + '/test-rolling-stream-with-existing-files.-1');
remove(__dirname + '/test-rolling-stream-with-existing-files.1.1');
remove(__dirname + '/test-rolling-stream-with-existing-files.1');
create(__dirname + '/test-rolling-stream-with-existing-files.11');
create(__dirname + '/test-rolling-stream-with-existing-files.20');
create(__dirname + '/test-rolling-stream-with-existing-files.-1');
create(__dirname + '/test-rolling-stream-with-existing-files.1.1');
create(__dirname + '/test-rolling-stream-with-existing-files.1');
var that = this
, stream = new RollingFileStream(
__dirname + "/test-rolling-stream-with-existing-files",
45,
5
);
write7Cheese(that, stream);
},
'the files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be rolled': function(files) {
assert.include(files, 'test-rolling-stream-with-existing-files');
assert.include(files, 'test-rolling-stream-with-existing-files.1');
assert.include(files, 'test-rolling-stream-with-existing-files.2');
assert.include(files, 'test-rolling-stream-with-existing-files.11');
assert.include(files, 'test-rolling-stream-with-existing-files.20');
}
}
}
}).exportTo(module);
function write7Cheese(that, stream) {
var streamed = 0;
[0, 1, 2, 3, 4, 5, 6].forEach(function(i) {
stream.write(i +".cheese\n", "utf8", function(e) {
streamed++;
if (e) { return that.callback(e); }
if (streamed === 7) {
stream.end();
that.callback();
}
});
});
}

View File

@ -0,0 +1,33 @@
"use strict";
var test = require('tape')
, sandbox = require('sandboxed-module');
test('default settings', function(t) {
var output = []
, log4js = sandbox.require(
'../../lib/log4js',
{
requires: {
'./appenders/stdout': {
'name': 'stdout',
'appender': function () {
return function(evt) {
output.push(evt);
};
},
'configure': function (config) {
return this.appender();
}
}
}
}
)
, logger = log4js.getLogger("default-settings");
logger.info("This should go to stdout.");
t.plan(2);
t.equal(output.length, 1, "It should log to stdout.");
t.equal(output[0].data[0], "This should go to stdout.", "It should log the message.");
t.end();
});

View File

@ -0,0 +1,37 @@
"use strict";
var test = require('tape')
, sandbox = require('sandboxed-module');
test('file appender SIGHUP', function(t) {
var closeCalled = 0
, openCalled = 0
, appender = sandbox.require(
'../../lib/appenders/file',
{
'requires': {
'streamroller': {
'RollingFileStream': function() {
this.openTheStream = function() {
openCalled++;
};
this.closeTheStream = function(cb) {
closeCalled++;
cb();
};
this.on = function() {};
}
}
}
}
).appender('sighup-test-file');
process.kill(process.pid, 'SIGHUP');
t.plan(2);
setTimeout(function() {
t.equal(openCalled, 1, 'open should be called once');
t.equal(closeCalled, 1, 'close should be called once');
t.end();
}, 10);
});

View File

@ -0,0 +1,33 @@
"use strict";
var test = require('tape')
, path = require('path')
, sandbox = require('sandboxed-module');
test('Reload configuration shutdown hook', function(t) {
var timerId
, log4js = sandbox.require(
'../../lib/log4js',
{
globals: {
clearInterval: function(id) {
timerId = id;
},
setInterval: function(fn, time) {
return "1234";
}
}
}
);
log4js.configure(
path.join(__dirname, 'test-config.json'),
{ reloadSecs: 30 }
);
t.plan(1);
log4js.shutdown(function() {
t.equal(timerId, "1234", "Shutdown should clear the reload timer");
t.end();
});
});

View File

@ -0,0 +1,22 @@
"use strict";
var test = require('tape')
, layouts = require('../../lib/layouts')
, sandbox = require('sandboxed-module');
test('stderr appender', function(t) {
var output = []
, appender = sandbox.require(
'../../lib/appenders/stderr',
{
globals: {
process: { stderr: { write : function(data) { output.push(data); } } }
}
}
).appender(layouts.messagePassThroughLayout);
appender({ data: ["biscuits"] });
t.plan(2);
t.equal(output.length, 1, 'There should be one message.');
t.equal(output[0], 'biscuits\n', 'The message should be biscuits.');
t.end();
});

View File

@ -0,0 +1,22 @@
"use strict";
var test = require('tape')
, layouts = require('../../lib/layouts')
, sandbox = require('sandboxed-module');
test('stdout appender', function(t) {
var output = []
, appender = sandbox.require(
'../../lib/appenders/stdout',
{
globals: {
process: { stdout: { write : function(data) { output.push(data); } } }
}
}
).appender(layouts.messagePassThroughLayout);
appender({ data: ["cheese"] });
t.plan(2);
t.equal(output.length, 1, 'There should be one message.');
t.equal(output[0], 'cheese\n', 'The message should be cheese.');
t.end();
});

View File

@ -0,0 +1,5 @@
{
"appenders": [
{ "type": "stdout" }
]
}

View File

@ -17,9 +17,9 @@ vows.describe('log4js categoryFilter').addBatch({
'appender': {
topic: function() {
var log4js = require('../lib/log4js'), logEvents = [], webLogger, appLogger;
var log4js = require('../../lib/log4js'), logEvents = [], webLogger, appLogger;
log4js.clearAppenders();
var appender = require('../lib/appenders/categoryFilter')
var appender = require('../../lib/appenders/categoryFilter')
.appender(
['app'],
function(evt) { logEvents.push(evt); }
@ -45,13 +45,13 @@ vows.describe('log4js categoryFilter').addBatch({
'configure': {
topic: function() {
var log4js = require('../lib/log4js')
var log4js = require('../../lib/log4js')
, logger, weblogger;
remove(__dirname + '/categoryFilter-web.log');
remove(__dirname + '/categoryFilter-noweb.log');
log4js.configure('test/with-categoryFilter.json');
log4js.configure('test/vows/with-categoryFilter.json');
logger = log4js.getLogger("app");
weblogger = log4js.getLogger("web");

View File

@ -1,9 +1,9 @@
"use strict";
var assert = require('assert');
var vows = require('vows');
var layouts = require('../lib/layouts');
var layouts = require('../../lib/layouts');
var sandbox = require('sandboxed-module');
var LoggingEvent = require('../lib/logger').LoggingEvent;
var LoggingEvent = require('../../lib/logger').LoggingEvent;
var cluster = require('cluster');
vows.describe('log4js cluster appender').addBatch({
@ -42,7 +42,7 @@ vows.describe('log4js cluster appender').addBatch({
};
// Load appender and fake modules in it
var appenderModule = sandbox.require('../lib/appenders/clustered', {
var appenderModule = sandbox.require('../../lib/appenders/clustered', {
requires: {
'cluster': fakeCluster,
}
@ -119,7 +119,7 @@ vows.describe('log4js cluster appender').addBatch({
};
// Load appender and fake modules in it
var appenderModule = sandbox.require('../lib/appenders/clustered', {
var appenderModule = sandbox.require('../../lib/appenders/clustered', {
requires: {
'cluster': fakeCluster,
},

View File

@ -24,7 +24,7 @@ vows.describe('log4js configure').addBatch({
topic: function() {
var testAppender = makeTestAppender(),
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'./appenders/cheese': testAppender
@ -55,14 +55,14 @@ vows.describe('log4js configure').addBatch({
topic: function() {
var testAppender = makeTestAppender(),
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{ requires: { './appenders/cheese': testAppender } }
);
log4js.loadAppender('cheese');
return log4js;
},
'should load appender from ../lib/appenders': function(log4js) {
'should load appender from ../../lib/appenders': function(log4js) {
assert.ok(log4js.appenders.cheese);
},
'should add appender configure function to appenderMakers' : function(log4js) {
@ -73,7 +73,7 @@ vows.describe('log4js configure').addBatch({
topic: function() {
var testAppender = makeTestAppender(),
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{ requires: { 'some/other/external': testAppender } }
);
log4js.loadAppender('some/other/external');
@ -89,7 +89,7 @@ vows.describe('log4js configure').addBatch({
'when appender object loaded via loadAppender': {
topic: function() {
var testAppender = makeTestAppender(),
log4js = sandbox.require('../lib/log4js');
log4js = sandbox.require('../../lib/log4js');
log4js.loadAppender('some/other/external', testAppender);
return log4js;
@ -131,7 +131,7 @@ vows.describe('log4js configure').addBatch({
}
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'fs': fakeFS,

View File

@ -10,7 +10,7 @@
// Basic set up
var vows = require('vows');
var assert = require('assert');
var toLevel = require('../lib/levels').toLevel;
var toLevel = require('../../lib/levels').toLevel;
// uncomment one or other of the following to see progress (or not) while running the tests
// var showProgress = console.log;
@ -47,7 +47,7 @@ function getLoggerName(level) {
function getTopLevelContext(nop, configToTest, name) {
return {
topic: function() {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
// create loggers for each level,
// keeping the level in the logger's name for traceability
strLevels.forEach(function(l) {

View File

@ -4,7 +4,7 @@ var vows = require('vows')
, assert = require('assert')
, util = require('util')
, EE = require('events').EventEmitter
, levels = require('../lib/levels');
, levels = require('../../lib/levels');
function MockLogger() {
@ -63,7 +63,7 @@ function request(cl, method, url, code, reqHeaders, resHeaders) {
vows.describe('log4js connect logger').addBatch({
'getConnectLoggerModule': {
topic: function() {
var clm = require('../lib/connect-logger');
var clm = require('../../lib/connect-logger');
return clm;
},

View File

@ -1,10 +1,10 @@
"use strict";
var assert = require('assert')
, vows = require('vows')
, layouts = require('../lib/layouts')
, layouts = require('../../lib/layouts')
, sandbox = require('sandboxed-module');
vows.describe('../lib/appenders/console').addBatch({
vows.describe('../../lib/appenders/console').addBatch({
'appender': {
topic: function() {
var messages = []
@ -12,7 +12,7 @@ vows.describe('../lib/appenders/console').addBatch({
log: function(msg) { messages.push(msg); }
}
, appenderModule = sandbox.require(
'../lib/appenders/console',
'../../lib/appenders/console',
{
globals: {
'console': fakeConsole
@ -29,5 +29,5 @@ vows.describe('../lib/appenders/console').addBatch({
assert.equal(messages[0], 'blah');
}
}
}).exportTo(module);

View File

@ -4,7 +4,7 @@ var vows = require('vows')
, path = require('path')
, fs = require('fs')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, log4js = require('../../lib/log4js')
, EOL = require('os').EOL || '\n';
function removeFile(filename) {
@ -17,20 +17,20 @@ function removeFile(filename) {
};
}
vows.describe('../lib/appenders/dateFile').addBatch({
vows.describe('../../lib/appenders/dateFile').addBatch({
'appender': {
'adding multiple dateFileAppenders': {
topic: function () {
var listenersCount = process.listeners('exit').length,
dateFileAppender = require('../lib/appenders/dateFile'),
dateFileAppender = require('../../lib/appenders/dateFile'),
count = 5,
logfile;
while (count--) {
logfile = path.join(__dirname, 'datefa-default-test' + count + '.log');
log4js.addAppender(dateFileAppender.appender(logfile));
}
return listenersCount;
},
teardown: function() {
@ -40,7 +40,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({
removeFile('datefa-default-test3.log')();
removeFile('datefa-default-test4.log')();
},
'should only add one `exit` listener': function (initialCount) {
assert.equal(process.listeners('exit').length, initialCount + 1);
},
@ -52,7 +52,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({
var exitListener
, openedFiles = []
, dateFileAppender = sandbox.require(
'../lib/appenders/dateFile',
'../../lib/appenders/dateFile',
{
globals: {
process: {
@ -62,7 +62,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({
}
},
requires: {
'../streams': {
'streamroller': {
DateRollingFileStream: function(filename) {
openedFiles.push(filename);
@ -71,7 +71,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({
};
}
}
}
}
}
);
for (var i=0; i < 5; i += 1) {
@ -85,55 +85,55 @@ vows.describe('../lib/appenders/dateFile').addBatch({
assert.isEmpty(openedFiles);
}
},
'with default settings': {
topic: function() {
var that = this,
testFile = path.join(__dirname, 'date-appender-default.log'),
appender = require('../lib/appenders/dateFile').appender(testFile),
appender = require('../../lib/appenders/dateFile').appender(testFile),
logger = log4js.getLogger('default-settings');
log4js.clearAppenders();
log4js.addAppender(appender, 'default-settings');
logger.info("This should be in the file.");
setTimeout(function() {
fs.readFile(testFile, "utf8", that.callback);
}, 100);
},
teardown: removeFile('date-appender-default.log'),
'should write to the file': function(contents) {
assert.include(contents, 'This should be in the file');
},
'should use the basic layout': function(contents) {
assert.match(
contents,
contents,
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
);
}
}
}
}).addBatch({
'configure': {
'with dateFileAppender': {
topic: function() {
var log4js = require('../lib/log4js')
var log4js = require('../../lib/log4js')
, logger;
//this config file defines one file appender (to ./date-file-test.log)
//and sets the log level for "tests" to WARN
log4js.configure('test/with-dateFile.json');
log4js.configure('test/vows/with-dateFile.json');
logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', this.callback);
},
teardown: removeFile('date-file-test.log'),
'should load appender configuration from a json file': function(err, contents) {
if (err) {
throw err;
@ -145,26 +145,26 @@ vows.describe('../lib/appenders/dateFile').addBatch({
'with options.alwaysIncludePattern': {
topic: function() {
var self = this
, log4js = require('../lib/log4js')
, format = require('../lib/date_format')
, log4js = require('../../lib/log4js')
, format = require('../../lib/date_format')
, logger
, options = {
"appenders": [
{
"category": "tests",
"type": "dateFile",
"filename": "test/date-file-test",
"category": "tests",
"type": "dateFile",
"filename": "test/vows/date-file-test",
"pattern": "-from-MM-dd.log",
"alwaysIncludePattern": true,
"layout": {
"type": "messagePassThrough"
"layout": {
"type": "messagePassThrough"
}
}
]
}
, thisTime = format.asString(options.appenders[0].pattern, new Date());
fs.writeFileSync(
path.join(__dirname, 'date-file-test' + thisTime),
path.join(__dirname, 'date-file-test' + thisTime),
"this is existing data" + EOL,
'utf8'
);
@ -189,10 +189,10 @@ vows.describe('../lib/appenders/dateFile').addBatch({
topic: function () {
var fileOpened,
appender = sandbox.require(
'../lib/appenders/dateFile',
'../../lib/appenders/dateFile',
{ requires:
{ '../streams':
{ DateRollingFileStream:
{ 'streamroller':
{ DateRollingFileStream:
function(file) {
fileOpened = file;
return {
@ -205,10 +205,10 @@ vows.describe('../lib/appenders/dateFile').addBatch({
}
);
appender.configure(
{
filename: "whatever.log",
maxLogSize: 10
},
{
filename: "whatever.log",
maxLogSize: 10
},
{ cwd: '/absolute/path/to' }
);
return fileOpened;
@ -218,6 +218,6 @@ vows.describe('../lib/appenders/dateFile').addBatch({
assert.equal(fileOpened, expected);
}
}
}
}).exportTo(module);

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, dateFormat = require('../lib/date_format');
, dateFormat = require('../../lib/date_format');
function createFixedDate() {
return new Date(2010, 0, 11, 14, 31, 30, 5);
@ -28,14 +28,14 @@ vows.describe('date_format').addBatch({
date.getTimezoneOffset = function() { return -660; };
assert.equal(
dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
"2010-01-11T14:31:30+1100"
"2010-01-11T14:31:30.005+1100"
);
date = createFixedDate();
date.setMinutes(date.getMinutes() - date.getTimezoneOffset() + 120);
date.getTimezoneOffset = function() { return 120; };
assert.equal(
dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
"2010-01-11T14:31:30-0200"
"2010-01-11T14:31:30.005-0200"
);
},

View File

@ -3,7 +3,7 @@ var vows = require('vows')
, fs = require('fs')
, path = require('path')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, log4js = require('../../lib/log4js')
, assert = require('assert')
, zlib = require('zlib')
, EOL = require('os').EOL || '\n';
@ -26,15 +26,18 @@ vows.describe('log4js fileAppender').addBatch({
, count = 5, logfile;
while (count--) {
logfile = path.join(__dirname, '/fa-default-test' + count + '.log');
log4js.addAppender(require('../lib/appenders/file').appender(logfile), 'default-settings');
logfile = path.join(__dirname, 'fa-default-test' + count + '.log');
log4js.addAppender(
require('../../lib/appenders/file').appender(logfile),
'default-settings'
);
}
return listenersCount;
},
'does not add more than one `exit` listeners': function (initialCount) {
assert.ok(process.listeners('exit').length <= initialCount + 1);
'does not add more than one `exit` listener': function (initialCount) {
assert.equal(initialCount + 1, process.listeners('exit').length);
}
},
@ -43,17 +46,19 @@ vows.describe('log4js fileAppender').addBatch({
var exitListener
, openedFiles = []
, fileAppender = sandbox.require(
'../lib/appenders/file',
'../../lib/appenders/file',
{
globals: {
process: {
on: function(evt, listener) {
exitListener = listener;
if (evt == 'exit') {
exitListener = listener;
}
}
}
},
requires: {
'../streams': {
'streamroller': {
RollingFileStream: function(filename) {
openedFiles.push(filename);
@ -82,12 +87,15 @@ vows.describe('log4js fileAppender').addBatch({
'with default fileAppender settings': {
topic: function() {
var that = this
, testFile = path.join(__dirname, '/fa-default-test.log')
, testFile = path.join(__dirname, 'fa-default-test.log')
, logger = log4js.getLogger('default-settings');
remove(testFile);
log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings');
log4js.addAppender(
require('../../lib/appenders/file').appender(testFile),
'default-settings'
);
logger.info("This should be in the file.");
@ -114,10 +122,10 @@ vows.describe('log4js fileAppender').addBatch({
function addAppender(cat) {
var testFile = path.join(
__dirname,
'/fa-subcategories-test-'+cat.join('-').replace(/\./g, "_")+'.log'
'fa-subcategories-test-'+cat.join('-').replace(/\./g, "_")+'.log'
);
remove(testFile);
log4js.addAppender(require('../lib/appenders/file').appender(testFile), cat);
log4js.addAppender(require('../../lib/appenders/file').appender(testFile), cat);
return testFile;
}
@ -190,7 +198,7 @@ vows.describe('log4js fileAppender').addBatch({
},
'with a max file size and no backups': {
topic: function() {
var testFile = path.join(__dirname, '/fa-maxFileSize-test.log')
var testFile = path.join(__dirname, 'fa-maxFileSize-test.log')
, logger = log4js.getLogger('max-file-size')
, that = this;
remove(testFile);
@ -198,7 +206,7 @@ vows.describe('log4js fileAppender').addBatch({
//log file of 100 bytes maximum, no backups
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
require('../../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
'max-file-size'
);
logger.info("This is the first log message.");
@ -228,7 +236,7 @@ vows.describe('log4js fileAppender').addBatch({
},
'with a max file size and 2 backups': {
topic: function() {
var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-test.log')
var testFile = path.join(__dirname, 'fa-maxFileSize-with-backups-test.log')
, logger = log4js.getLogger('max-file-size-backups');
remove(testFile);
remove(testFile+'.1');
@ -237,7 +245,7 @@ vows.describe('log4js fileAppender').addBatch({
//log file of 50 bytes maximum, 2 backups
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
require('../../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
'max-file-size-backups'
);
logger.info("This is the first log message.");
@ -301,7 +309,7 @@ vows.describe('log4js fileAppender').addBatch({
},
'with a max file size and 2 compressed backups': {
topic: function() {
var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-compressed-test.log')
var testFile = path.join(__dirname, 'fa-maxFileSize-with-backups-compressed-test.log')
, logger = log4js.getLogger('max-file-size-backups');
remove(testFile);
remove(testFile+'.1.gz');
@ -310,8 +318,8 @@ vows.describe('log4js fileAppender').addBatch({
//log file of 50 bytes maximum, 2 backups
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/file').appender(
testFile, log4js.layouts.basicLayout, 50, 2, true
require('../../lib/appenders/file').appender(
testFile, log4js.layouts.basicLayout, 50, 2, { compress: true }
),
'max-file-size-backups'
);
@ -380,11 +388,11 @@ vows.describe('log4js fileAppender').addBatch({
'configure' : {
'with fileAppender': {
topic: function() {
var log4js = require('../lib/log4js')
var log4js = require('../../lib/log4js')
, logger;
//this config file defines one file appender (to ./tmp-tests.log)
//and sets the log level for "tests" to WARN
log4js.configure('./test/log4js.json');
log4js.configure('./test/vows/log4js.json');
logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
@ -403,7 +411,7 @@ vows.describe('log4js fileAppender').addBatch({
var consoleArgs
, errorHandler
, fileAppender = sandbox.require(
'../lib/appenders/file',
'../../lib/appenders/file',
{
globals: {
console: {
@ -413,7 +421,7 @@ vows.describe('log4js fileAppender').addBatch({
}
},
requires: {
'../streams': {
'streamroller': {
RollingFileStream: function(filename) {
this.end = function() {};

View File

@ -3,7 +3,7 @@ var vows = require('vows')
, fs = require('fs')
, path = require('path')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, log4js = require('../../lib/log4js')
, assert = require('assert')
, EOL = require('os').EOL || '\n';
@ -27,7 +27,7 @@ vows.describe('log4js fileSyncAppender').addBatch({
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/fileSync').appender(testFile),
require('../../lib/appenders/fileSync').appender(testFile),
'default-settings'
);
@ -55,7 +55,14 @@ vows.describe('log4js fileSyncAppender').addBatch({
//log file of 100 bytes maximum, no backups
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 100, 0),
require(
'../../lib/appenders/fileSync'
).appender(
testFile,
log4js.layouts.basicLayout,
100,
0
),
'max-file-size'
);
logger.info("This is the first log message.");
@ -92,7 +99,12 @@ vows.describe('log4js fileSyncAppender').addBatch({
//log file of 50 bytes maximum, 2 backups
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 50, 2),
require('../../lib/appenders/fileSync').appender(
testFile,
log4js.layouts.basicLayout,
50,
2
),
'max-file-size-backups'
);
logger.info("This is the first log message.");
@ -156,7 +168,7 @@ vows.describe('log4js fileSyncAppender').addBatch({
'configure' : {
'with fileSyncAppender': {
topic: function() {
var log4js = require('../lib/log4js')
var log4js = require('../../lib/log4js')
, logger;
//this config defines one file appender (to ./tmp-sync-tests.log)
//and sets the log level for "tests" to WARN

View File

@ -2,8 +2,8 @@
var vows = require('vows')
, assert = require('assert')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, realLayouts = require('../lib/layouts')
, log4js = require('../../lib/log4js')
, realLayouts = require('../../lib/layouts')
, setupLogging = function(options, category, compressedLength) {
var fakeDgram = {
sent: false,
@ -56,7 +56,7 @@ var vows = require('vows')
},
messagePassThroughLayout: realLayouts.messagePassThroughLayout
}
, appender = sandbox.require('../lib/appenders/gelf', {
, appender = sandbox.require('../../lib/appenders/gelf', {
requires: {
dgram: fakeDgram,
zlib: fakeZlib,

View File

@ -5,7 +5,7 @@ var vows = require('vows')
vows.describe('log4js global loglevel').addBatch({
'global loglevel' : {
topic: function() {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
return log4js;
},

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows'),
assert = require('assert'),
log4js = require('../lib/log4js'),
log4js = require('../../lib/log4js'),
sandbox = require('sandboxed-module');
function setupLogging(category, options) {
@ -34,7 +34,7 @@ function setupLogging(category, options) {
}
};
var hipchatModule = sandbox.require('../lib/appenders/hipchat', {
var hipchatModule = sandbox.require('../../lib/appenders/hipchat', {
requires: {
'hipchat-notifier': fakeHipchatNotifier
}

View File

@ -17,7 +17,7 @@ function test(args, pattern, value) {
vows.describe('log4js layouts').addBatch({
'colouredLayout': {
topic: function() {
return require('../lib/layouts').colouredLayout;
return require('../../lib/layouts').colouredLayout;
},
'should apply level colour codes to output': function(layout) {
@ -46,7 +46,7 @@ vows.describe('log4js layouts').addBatch({
'messagePassThroughLayout': {
topic: function() {
return require('../lib/layouts').messagePassThroughLayout;
return require('../../lib/layouts').messagePassThroughLayout;
},
'should take a logevent and output only the message' : function(layout) {
assert.equal(layout({
@ -82,16 +82,24 @@ vows.describe('log4js layouts').addBatch({
}), "{ thing: 1 }");
},
'should print the stacks of a passed error objects': function(layout) {
assert.isArray(layout({
data: [ new Error() ],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString: function() { return "ERROR"; }
}
}).match(/Error\s+at Object\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s+at runTest/)
, 'regexp did not return a match');
assert.isArray(
layout({
data: [ new Error() ],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString: function() { return "ERROR"; }
}
}).match(
new RegExp('' +
/Error\s+at Object\..*\s+/.source +
/\((.*)test[\\\/]vows[\\\/]layouts-test\.js/.source +
/\:\d+\:\d+\)\s+at runTest/.source
)
),
'regexp did not return a match'
);
},
'with passed augmented errors': {
topic: function(layout){
@ -127,7 +135,7 @@ vows.describe('log4js layouts').addBatch({
'basicLayout': {
topic: function() {
var layout = require('../lib/layouts').basicLayout,
var layout = require('../../lib/layouts').basicLayout,
event = {
data: ['this is a test'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
@ -170,6 +178,7 @@ vows.describe('log4js layouts').addBatch({
assert.equal(lines[i+2], stack[i+1]);
}
}
},
'should output any extra data in the log event as util.inspect strings': function(args) {
var layout = args[0], event = args[1], output, lines;
@ -195,7 +204,7 @@ vows.describe('log4js layouts').addBatch({
level: {
toString: function() { return "DEBUG"; }
}
}, layout = require('../lib/layouts').patternLayout
}, layout = require('../../lib/layouts').patternLayout
, tokens = {
testString: 'testStringToken',
testFunction: function() { return 'testFunctionToken'; },
@ -243,7 +252,7 @@ vows.describe('log4js layouts').addBatch({
test(args, '%d', '2010-12-05 14:18:30.045');
},
'%d should allow for format specification': function(args) {
test(args, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30-0000');
test(args, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30.045-0000');
test(args, '%d{ISO8601}', '2010-12-05 14:18:30.045');
test(args, '%d{ABSOLUTE}', '14:18:30.045');
test(args, '%d{DATE}', '05 12 2010 14:18:30.045');
@ -303,7 +312,7 @@ vows.describe('log4js layouts').addBatch({
}
},
'layout makers': {
topic: require('../lib/layouts'),
topic: require('../../lib/layouts'),
'should have a maker for each layout': function(layouts) {
assert.ok(layouts.layout("messagePassThrough"));
assert.ok(layouts.layout("basic"));
@ -313,7 +322,7 @@ vows.describe('log4js layouts').addBatch({
}
},
'add layout': {
topic: require('../lib/layouts'),
topic: require('../../lib/layouts'),
'should be able to add a layout': function(layouts) {
layouts.addLayout('test_layout', function(config){
assert.equal(config, 'test_config');

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, levels = require('../lib/levels');
, levels = require('../../lib/levels');
function assertThat(level) {
function assertForEach(assertion, test, otherLevels) {

View File

@ -9,7 +9,7 @@ vows.describe('log4js-abspath').addBatch({
topic: function() {
var appenderOptions,
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{ requires:
{ './appenders/fake':
{ name: "fake",
@ -30,7 +30,7 @@ vows.describe('log4js-abspath').addBatch({
}
]
};
log4js.configure(config, {
cwd: '/absolute/path/to'
});
@ -45,10 +45,10 @@ vows.describe('log4js-abspath').addBatch({
topic: function() {
var fileOpened,
fileAppender = sandbox.require(
'../lib/appenders/file',
'../../lib/appenders/file',
{ requires:
{ '../streams':
{ RollingFileStream:
{ 'streamroller':
{ RollingFileStream:
function(file) {
fileOpened = file;
return {
@ -61,10 +61,10 @@ vows.describe('log4js-abspath').addBatch({
}
);
fileAppender.configure(
{
filename: "whatever.log",
maxLogSize: 10
},
{
filename: "whatever.log",
maxLogSize: 10
},
{ cwd: '/absolute/path/to' }
);
return fileOpened;

View File

@ -0,0 +1,92 @@
"use strict";
var vows = require('vows'),
assert = require('assert'),
log4js = require('../../lib/log4js'),
sandbox = require('sandboxed-module');
var log = log4js.getLogger('lfstest');
function setupLogging(category, options) {
var sent = {};
function fake(event){
Object.keys(event).forEach(function(key) {
sent[key] = event[key];
});
}
var lfsModule = require('../../lib/appenders/logFacesAppender');
options.send = fake;
log4js.clearAppenders();
log4js.addAppender(lfsModule.configure(options), category);
lfsModule.setContext("foo", "bar");
lfsModule.setContext("bar", "foo");
return {
logger: log4js.getLogger(category),
results: sent
};
}
vows.describe('logFaces appender').addBatch({
'when using HTTP receivers': {
topic: function() {
var setup = setupLogging('myCategory', {
"type": "logFacesAppender",
"application": "LFS-HTTP",
"url": "http://localhost/receivers/rx1"
});
setup.logger.warn('Log event #1');
return setup;
},
'an event should be sent': function (topic) {
var event = topic.results;
assert.equal(event.a, 'LFS-HTTP');
assert.equal(event.m, 'Log event #1');
assert.equal(event.g, 'myCategory');
assert.equal(event.p, 'WARN');
assert.equal(event.p_foo, 'bar');
assert.equal(event.p_bar, 'foo');
// Assert timestamp, up to hours resolution.
var date = new Date(event.t);
assert.equal(
date.toISOString().substring(0, 14),
new Date().toISOString().substring(0, 14)
);
}
},
'when using UDP receivers': {
topic: function() {
var setup = setupLogging('udpCategory', {
"type": "logFacesAppender",
"application": "LFS-UDP",
"remoteHost": "127.0.0.1",
"port": 55201
});
setup.logger.error('Log event #2');
return setup;
},
'an event should be sent': function (topic) {
var event = topic.results;
assert.equal(event.a, 'LFS-UDP');
assert.equal(event.m, 'Log event #2');
assert.equal(event.g, 'udpCategory');
assert.equal(event.p, 'ERROR');
assert.equal(event.p_foo, 'bar');
assert.equal(event.p_bar, 'foo');
// Assert timestamp, up to hours resolution.
var date = new Date(event.t);
assert.equal(
date.toISOString().substring(0, 14),
new Date().toISOString().substring(0, 14)
);
}
}
}).export(module);

View File

@ -16,10 +16,10 @@ function remove(filename) {
vows.describe('log4js logLevelFilter').addBatch({
'appender': {
topic: function() {
var log4js = require('../lib/log4js'), logEvents = [], logger;
var log4js = require('../../lib/log4js'), logEvents = [], logger;
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/logLevelFilter')
require('../../lib/appenders/logLevelFilter')
.appender(
'ERROR',
undefined,
@ -44,14 +44,14 @@ vows.describe('log4js logLevelFilter').addBatch({
'configure': {
topic: function() {
var log4js = require('../lib/log4js')
var log4js = require('../../lib/log4js')
, logger;
remove(__dirname + '/logLevelFilter.log');
remove(__dirname + '/logLevelFilter-warnings.log');
remove(__dirname + '/logLevelFilter-debugs.log');
log4js.configure('test/with-logLevelFilter.json');
log4js.configure('test/vows/with-logLevelFilter.json');
logger = log4js.getLogger("tests");
logger.debug('debug');
logger.info('info');

View File

@ -1,11 +1,11 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, levels = require('../lib/levels')
, loggerModule = require('../lib/logger')
, levels = require('../../lib/levels')
, loggerModule = require('../../lib/logger')
, Logger = loggerModule.Logger;
vows.describe('../lib/logger').addBatch({
vows.describe('../../lib/logger').addBatch({
'constructor with no parameters': {
topic: new Logger(),
'should use default category': function(logger) {

View File

@ -15,7 +15,7 @@ function setupConsoleTest() {
});
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
globals: {
console: fakeConsole
@ -35,7 +35,7 @@ vows.describe('log4js').addBatch({
'getBufferedLogger': {
topic: function () {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
log4js.clearAppenders();
var logger = log4js.getBufferedLogger('tests');
return logger;
@ -54,7 +54,7 @@ vows.describe('log4js').addBatch({
'cache events': {
topic: function () {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
log4js.clearAppenders();
var logger = log4js.getBufferedLogger('tests1');
var events = [];
@ -78,7 +78,7 @@ vows.describe('log4js').addBatch({
'log events after flush() is called': {
topic: function () {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
log4js.clearAppenders();
var logger = log4js.getBufferedLogger('tests2');
logger.target.setLevel("TRACE");
@ -106,7 +106,7 @@ vows.describe('log4js').addBatch({
'getLogger': {
topic: function() {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
log4js.clearAppenders();
var logger = log4js.getLogger('tests');
logger.setLevel("DEBUG");
@ -162,7 +162,7 @@ vows.describe('log4js').addBatch({
shutdownCallbackCalled: false
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'./appenders/file':
@ -194,7 +194,7 @@ vows.describe('log4js').addBatch({
events.shutdownCallbackCalled = true;
// Re-enable log writing so other tests that use logger are not
// affected.
require('../lib/logger').enableAllLogWrites();
require('../../lib/logger').enableAllLogWrites();
callback(null, events);
});
},
@ -220,7 +220,7 @@ vows.describe('log4js').addBatch({
topic: function() {
var appenderConfig,
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'./appenders/file':
@ -254,7 +254,7 @@ vows.describe('log4js').addBatch({
'configuration that causes an error': {
topic: function() {
var log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'./appenders/file':
@ -292,7 +292,7 @@ vows.describe('log4js').addBatch({
var appenderConfig,
configFilename,
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{ requires:
{ 'fs':
{ statSync:
@ -353,24 +353,24 @@ vows.describe('log4js').addBatch({
}
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'./appenders/console': fakeConsoleAppender
'./appenders/stdout': fakeConsoleAppender
}
}
);
logger = log4js.getLogger("some-logger");
logger.debug("This is a test");
},
'should default to the console appender': function(evt) {
'should default to the stdout appender': function(evt) {
assert.equal(evt.data[0], "This is a test");
}
},
'addAppender' : {
topic: function() {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
log4js.clearAppenders();
return log4js;
},
@ -473,7 +473,7 @@ vows.describe('log4js').addBatch({
topic: function() {
var appenderEvents = [],
fakeConsole = {
'name': 'console',
'name': 'stdout',
'appender': function () {
return function(evt) {
appenderEvents.push(evt);
@ -487,10 +487,10 @@ vows.describe('log4js').addBatch({
log: function() { }
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'./appenders/console': fakeConsole
'./appenders/stdout': fakeConsole
},
globals: {
console: globalConsole
@ -505,7 +505,7 @@ vows.describe('log4js').addBatch({
return appenderEvents;
},
'should configure a console appender': function(appenderEvents) {
'should configure a stdout appender': function(appenderEvents) {
assert.equal(appenderEvents[0].data[0], 'this is a test');
},
@ -607,13 +607,13 @@ vows.describe('log4js').addBatch({
'configuration persistence' : {
topic: function() {
var logEvent,
firstLog4js = require('../lib/log4js'),
firstLog4js = require('../../lib/log4js'),
secondLog4js;
firstLog4js.clearAppenders();
firstLog4js.addAppender(function(evt) { logEvent = evt; });
secondLog4js = require('../lib/log4js');
secondLog4js = require('../../lib/log4js');
secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js");
return logEvent;
@ -625,7 +625,7 @@ vows.describe('log4js').addBatch({
'getDefaultLogger': {
topic: function() {
return require('../lib/log4js').getDefaultLogger();
return require('../../lib/log4js').getDefaultLogger();
},
'should return a logger': function(logger) {
assert.ok(logger.info);

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, log4js = require('../lib/log4js')
, log4js = require('../../lib/log4js')
, sandbox = require('sandboxed-module')
;
@ -39,7 +39,7 @@ function setupLogging(category, options) {
}
};
var logglyModule = sandbox.require('../lib/appenders/loggly', {
var logglyModule = sandbox.require('../../lib/appenders/loggly', {
requires: {
'loggly': fakeLoggly,
'../layouts': fakeLayouts

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, log4js = require('../lib/log4js')
, log4js = require('../../lib/log4js')
, sandbox = require('sandboxed-module')
;
@ -24,7 +24,7 @@ function setupLogging(category, options) {
}
};
var logstashModule = sandbox.require('../lib/appenders/logstashUDP', {
var logstashModule = sandbox.require('../../lib/appenders/logstashUDP', {
requires: {
'dgram': fakeDgram
}
@ -68,7 +68,8 @@ vows.describe('logstashUDP appender').addBatch({
var fields = {
field1: 'value1',
field2: 'value2',
level: 'TRACE'
level: 'TRACE',
category: 'myCategory'
};
assert.equal(JSON.stringify(json.fields), JSON.stringify(fields));
assert.equal(json.message, 'Log event #1');
@ -99,7 +100,10 @@ vows.describe('logstashUDP appender').addBatch({
'it sets some defaults': function (topic) {
var json = JSON.parse(topic.results.buffer.toString());
assert.equal(json.type, 'myLogger');
assert.equal(JSON.stringify(json.fields), JSON.stringify({'level': 'TRACE'}));
assert.equal(
JSON.stringify(json.fields),
JSON.stringify({'level': 'TRACE', 'category': 'myLogger'})
);
}
},
@ -118,7 +122,12 @@ vows.describe('logstashUDP appender').addBatch({
return setup;
},'they should be added to fields structure': function (topic) {
var json = JSON.parse(topic.results.buffer.toString());
var fields = {'extra1': 'value1', 'extra2': 'value2', 'level': 'TRACE'};
var fields = {
'extra1': 'value1',
'extra2': 'value2',
'level': 'TRACE',
'category': 'myLogger'
};
assert.equal(JSON.stringify(json.fields), JSON.stringify(fields));
}
}

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows');
var assert = require('assert');
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
var sandbox = require('sandboxed-module');
function setupLogging(category, options) {
@ -48,7 +48,7 @@ function setupLogging(category, options) {
};
var mailgunModule = sandbox.require('../lib/appenders/mailgun', {
var mailgunModule = sandbox.require('../../lib/appenders/mailgun', {
requires: {
'mailgun-js': fakeMailgun,
'../layouts': fakeLayouts

View File

@ -59,7 +59,7 @@ vows.describe('Multiprocess Appender').addBatch({
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
'../../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
@ -118,7 +118,7 @@ vows.describe('Multiprocess Appender').addBatch({
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
'../../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
@ -153,7 +153,7 @@ vows.describe('Multiprocess Appender').addBatch({
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
'../../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
@ -172,7 +172,7 @@ vows.describe('Multiprocess Appender').addBatch({
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
'../../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
@ -252,7 +252,7 @@ vows.describe('Multiprocess Appender').addBatch({
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
'../../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
@ -273,7 +273,7 @@ vows.describe('Multiprocess Appender').addBatch({
var results = {}
, fakeNet = makeFakeNet()
, appender = sandbox.require(
'../lib/appenders/multiprocess',
'../../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet,

View File

@ -1,12 +1,12 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, Level = require('../lib/levels')
, log4js = require('../lib/log4js')
, loggerModule = require('../lib/logger')
, Level = require('../../lib/levels')
, log4js = require('../../lib/log4js')
, loggerModule = require('../../lib/logger')
, Logger = loggerModule.Logger;
vows.describe('../lib/logger').addBatch({
vows.describe('../../lib/logger').addBatch({
'creating a new log level': {
topic: function () {
Level.forName("DIAG", 6000);

View File

@ -3,7 +3,7 @@ var vows = require('vows')
, assert = require('assert')
, util = require('util')
, EE = require('events').EventEmitter
, levels = require('../lib/levels');
, levels = require('../../lib/levels');
function MockLogger() {
@ -45,7 +45,7 @@ util.inherits(MockResponse, EE);
vows.describe('log4js connect logger').addBatch({
'getConnectLoggerModule': {
topic: function() {
var clm = require('../lib/connect-logger');
var clm = require('../../lib/connect-logger');
return clm;
},

View File

@ -15,7 +15,7 @@ function setupConsoleTest() {
});
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
globals: {
console: fakeConsole
@ -75,7 +75,7 @@ vows.describe('reload configuration').addBatch({
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'fs': fakeFS,
@ -113,7 +113,7 @@ vows.describe('reload configuration').addBatch({
fileRead = 0,
logEvents = [],
logger,
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
modulePath = require('path').normalize(__dirname + '/../../lib/log4js.json'),
mtime = new Date(),
fakeFS = {
config: {
@ -152,7 +152,7 @@ vows.describe('reload configuration').addBatch({
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'fs': fakeFS,
@ -193,7 +193,7 @@ vows.describe('reload configuration').addBatch({
fileRead = 0,
logEvents = [],
logger,
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
modulePath = require('path').normalize(__dirname + '/../../lib/log4js.json'),
mtime = new Date(),
fakeFS = {
config: {
@ -230,7 +230,7 @@ vows.describe('reload configuration').addBatch({
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'fs': fakeFS,
@ -284,7 +284,7 @@ vows.describe('reload configuration').addBatch({
'when called twice with reload options': {
topic: function() {
var modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
var modulePath = require('path').normalize(__dirname + '/../../lib/log4js.json'),
fakeFS = {
readFileSync: function (file, encoding) {
return JSON.stringify({});
@ -310,7 +310,7 @@ vows.describe('reload configuration').addBatch({
return 1234;
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'fs': fakeFS,

View File

@ -10,7 +10,7 @@
// Basic set up
var vows = require('vows');
var assert = require('assert');
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
var logger = log4js.getLogger('test-setLevel-asymmetry');
// uncomment one or other of the following to see progress (or not) while running the tests

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows');
var assert = require('assert');
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
var sandbox = require('sandboxed-module');
function setupLogging(category, options) {
@ -51,7 +51,7 @@ function setupLogging(category, options) {
};
var slackModule = sandbox.require('../lib/appenders/slack', {
var slackModule = sandbox.require('../../lib/appenders/slack', {
requires: {
'slack-node': fakeSlack,
'../layouts': fakeLayouts

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows');
var assert = require('assert');
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
var sandbox = require('sandboxed-module');
function setupLogging(category, options) {
@ -41,7 +41,7 @@ function setupLogging(category, options) {
var fakeTransportPlugin = function () {
};
var smtpModule = sandbox.require('../lib/appenders/smtp', {
var smtpModule = sandbox.require('../../lib/appenders/smtp', {
requires: {
'nodemailer': fakeMailer,
'nodemailer-sendmail-transport': fakeTransportPlugin,

View File

@ -2,8 +2,8 @@
var assert = require('assert')
, vows = require('vows')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, levels = require('../lib/levels');
, log4js = require('../../lib/log4js')
, levels = require('../../lib/levels');
vows.describe('subcategories').addBatch({
'loggers created after levels configuration is loaded': {

View File

@ -0,0 +1,23 @@
{
"appenders": [
{
"type": "categoryFilter",
"exclude": "web",
"appender": {
"type": "file",
"filename": "test/vows/categoryFilter-noweb.log",
"layout": {
"type": "messagePassThrough"
}
}
},
{
"category": "web",
"type": "file",
"filename": "test/vows/categoryFilter-web.log",
"layout": {
"type": "messagePassThrough"
}
}
]
}

View File

@ -0,0 +1,17 @@
{
"appenders": [
{
"category": "tests",
"type": "dateFile",
"filename": "test/vows/date-file-test.log",
"pattern": "-from-MM-dd",
"layout": {
"type": "messagePassThrough"
}
}
],
"levels": {
"tests": "WARN"
}
}

View File

@ -1,15 +1,15 @@
{
"appenders": [
{
"category": "tests",
{
"category": "tests",
"type": "logLevelFilter",
"level": "WARN",
"appender": {
"type": "file",
"filename": "test/logLevelFilter-warnings.log",
"layout": {
"type": "messagePassThrough"
}
"filename": "test/vows/logLevelFilter-warnings.log",
"layout": {
"type": "messagePassThrough"
}
}
},
{
@ -19,22 +19,22 @@
"maxLevel": "DEBUG",
"appender": {
"type": "file",
"filename": "test/logLevelFilter-debugs.log",
"filename": "test/vows/logLevelFilter-debugs.log",
"layout": {
"type": "messagePassThrough"
}
}
},
{
"category": "tests",
{
"category": "tests",
"type": "file",
"filename": "test/logLevelFilter.log",
"layout": {
"type": "messagePassThrough"
}
"filename": "test/vows/logLevelFilter.log",
"layout": {
"type": "messagePassThrough"
}
}
],
"levels": {
"tests": "TRACE"
}

View File

@ -1,23 +0,0 @@
{
"appenders": [
{
"type": "categoryFilter",
"exclude": "web",
"appender": {
"type": "file",
"filename": "test/categoryFilter-noweb.log",
"layout": {
"type": "messagePassThrough"
}
}
},
{
"category": "web",
"type": "file",
"filename": "test/categoryFilter-web.log",
"layout": {
"type": "messagePassThrough"
}
}
]
}

View File

@ -1,17 +0,0 @@
{
"appenders": [
{
"category": "tests",
"type": "dateFile",
"filename": "test/date-file-test.log",
"pattern": "-from-MM-dd",
"layout": {
"type": "messagePassThrough"
}
}
],
"levels": {
"tests": "WARN"
}
}