changed default appender to stdout, replaced streams with streamroller, started removal of vows tests

Gareth Jones 2016-10-23 11:50:53 +11:00
parent 409ade842a
commit 2b84671471
59 changed files with 381 additions and 1220 deletions
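The gist of the change, as a minimal sketch (assuming log4js at this revision is installed as a local dependency; the file name, category, and size values below are illustrative, not taken from the commit): with no configuration the default appender is now stdout rather than console, and the file-based appenders delegate log rolling to the external streamroller package instead of the removed lib/streams module.

"use strict";
var log4js = require('log4js');

// With no configure() call, the default config is now [{ type: "stdout" }],
// so this message is written to process.stdout using the coloured layout.
var logger = log4js.getLogger('example');
logger.info('written to stdout by the new default appender');

// File appenders keep the same options (filename, maxLogSize, backups), but
// the rolling itself is now handled by streamroller's RollingFileStream.
log4js.configure({
  appenders: [
    { type: 'stdout' },
    { type: 'file', filename: 'example.log', maxLogSize: 1024, backups: 3, category: 'example' }
  ]
});
log4js.getLogger('example').warn('written to stdout and appended to example.log');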

View File

@ -1,5 +1,5 @@
"use strict";
var streams = require('../streams')
var streams = require('streamroller')
, layouts = require('../layouts')
, path = require('path')
, os = require('os')

View File

@ -2,7 +2,7 @@
var layouts = require('../layouts')
, path = require('path')
, fs = require('fs')
, streams = require('../streams')
, streams = require('streamroller')
, os = require('os')
, eol = os.EOL || '\n'
, openFiles = []

View File

@ -1,9 +1,8 @@
"use strict";
var debug = require('../debug')('fileSync')
var debug = require('debug')('log4js:fileSync')
, layouts = require('../layouts')
, path = require('path')
, fs = require('fs')
, streams = require('../streams')
, os = require('os')
, eol = os.EOL || '\n'
;

View File

@ -4,7 +4,7 @@ var layouts = require('../layouts');
var levels = require('../levels');
var dgram = require('dgram');
var util = require('util');
var debug = require('../debug')('GELF Appender');
var debug = require('debug')('log4js:gelf');
var LOG_EMERG=0; // system is unusable
var LOG_ALERT=1; // action must be taken immediately

lib/appenders/stdout.js Normal file (21 additions)
View File

@ -0,0 +1,21 @@
"use strict";
var layouts = require('../layouts');
function stdoutAppender(layout, timezoneOffset) {
layout = layout || layouts.colouredLayout;
return function(loggingEvent) {
process.stdout.write(layout(loggingEvent, timezoneOffset) + '\n');
};
}
function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return stdoutAppender(layout, config.timezoneOffset);
}
exports.appender = stdoutAppender;
exports.configure = configure;

View File

@ -1,15 +0,0 @@
"use strict";
module.exports = function(label) {
var debug;
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) {
console.error('LOG4JS: (%s) %s', label, message);
};
} else {
debug = function() { };
}
return debug;
};

View File

@ -59,13 +59,11 @@ var events = require('events')
, appenderShutdowns = {}
, defaultConfig = {
appenders: [
{ type: "console" }
{ type: "stdout" }
],
replaceConsole: false
};
require('./appenders/console');
function hasLogger(logger) {
return loggers.hasOwnProperty(logger);
}
@ -115,7 +113,7 @@ function normalizeCategory (category) {
return category + '.';
}
function doesLevelEntryContainsLogger (levelCategory, loggerCategory) {
function doesLevelEntryContainsLogger (levelCategory, loggerCategory) {
var normalizedLevelCategory = normalizeCategory(levelCategory);
var normalizedLoggerCategory = normalizeCategory(loggerCategory);
return normalizedLoggerCategory.substring(0, normalizedLevelCategory.length) == normalizedLevelCategory; //jshint ignore:line
@ -160,7 +158,7 @@ function getLogger (loggerCategoryName) {
}
}
/* jshint +W073 */
// Create the logger for this name if it doesn't already exist
loggers[loggerCategoryName] = new Logger(loggerCategoryName, level);
@ -183,7 +181,7 @@ function getLogger (loggerCategoryName) {
});
}
}
return loggers[loggerCategoryName];
}
@ -200,10 +198,10 @@ function addAppender () {
if (Array.isArray(args[0])) {
args = args[0];
}
args.forEach(function(appenderCategory) {
addAppenderToCategory(appender, appenderCategory);
if (appenderCategory === ALL_CATEGORIES) {
addAppenderToAllLoggers(appender);
} else {
@ -213,7 +211,7 @@ function addAppender () {
loggers[loggerCategory].addListener("log", appender);
}
}
}
});
}
@ -306,7 +304,7 @@ function configureOnceOff(config, options) {
try {
configureLevels(config.levels);
configureAppenders(config.appenders, options);
if (config.replaceConsole) {
replaceConsole();
} else {
@ -314,7 +312,7 @@ function configureOnceOff(config, options) {
}
} catch (e) {
throw new Error(
"Problem reading log4js config " + util.inspect(config) +
"Problem reading log4js config " + util.inspect(config) +
". Error was \"" + e.message + "\" (" + e.stack + ")"
);
}
@ -324,7 +322,7 @@ function configureOnceOff(config, options) {
function reloadConfiguration(options) {
var mtime = getMTime(configState.filename);
if (!mtime) return;
if (configState.lastMTime && (mtime.getTime() > configState.lastMTime.getTime())) {
configureOnceOff(loadConfigurationFile(configState.filename), options);
}
@ -355,7 +353,7 @@ function configure(configurationFileOrObject, options) {
var config = configurationFileOrObject;
config = config || process.env.LOG4JS_CONFIG;
options = options || {};
if (config === undefined || config === null || typeof(config) === 'string') {
if (options.reloadSecs) {
initReloadConfiguration(config, options);
@ -481,19 +479,19 @@ module.exports = {
getLogger: getLogger,
getDefaultLogger: getDefaultLogger,
hasLogger: hasLogger,
addAppender: addAppender,
loadAppender: loadAppender,
clearAppenders: clearAppenders,
configure: configure,
shutdown: shutdown,
replaceConsole: replaceConsole,
restoreConsole: restoreConsole,
levels: levels,
setGlobalLogLevel: setGlobalLogLevel,
layouts: layouts,
appenders: {},
appenderMakers: appenderMakers,

View File

@ -66,11 +66,7 @@ Logger.prototype.isLevelEnabled = function(otherLevel) {
return this.level.isLessThanOrEqualTo(otherLevel);
};
['Trace','Debug','Info','Warn','Error','Fatal', 'Mark'].forEach(
function(levelString) {
addLevelMethods(levelString);
}
);
['Trace','Debug','Info','Warn','Error','Fatal', 'Mark'].forEach(addLevelMethods);
function addLevelMethods(level) {
level = levels.toLevel(level);
@ -80,16 +76,13 @@ function addLevelMethods(level) {
var isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1);
Logger.prototype['is'+isLevelMethod+'Enabled'] = function() {
return this.isLevelEnabled(level.toString());
return this.isLevelEnabled(level);
};
Logger.prototype[levelMethod] = function () {
if (logWritesEnabled && this.isLevelEnabled(level)) {
var numArgs = arguments.length;
var args = new Array(numArgs);
for (var i = 0; i < numArgs; i++) {
args[i] = arguments[i];
}
var args = Array.prototype.slice.call(arguments);
this._log(level, args);
}
};
@ -120,4 +113,4 @@ exports.LoggingEvent = LoggingEvent;
exports.Logger = Logger;
exports.disableAllLogWrites = disableAllLogWrites;
exports.enableAllLogWrites = enableAllLogWrites;
exports.addLevelMethods = addLevelMethods;
exports.addLevelMethods = addLevelMethods;

View File

@ -1,94 +0,0 @@
"use strict";
var fs = require('fs')
, stream
, debug = require('../debug')('BaseRollingFileStream')
, util = require('util')
, semver = require('semver');
if (semver.satisfies(process.version, '>=0.10.0')) {
stream = require('stream');
} else {
stream = require('readable-stream');
}
module.exports = BaseRollingFileStream;
function BaseRollingFileStream(filename, options) {
debug("In BaseRollingFileStream");
this.filename = filename;
this.options = options || {};
this.options.encoding = this.options.encoding || 'utf8';
this.options.mode = this.options.mode || parseInt('0644', 8);
this.options.flags = this.options.flags || 'a';
this.currentSize = 0;
function currentFileSize(file) {
var fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
}
return fileSize;
}
function throwErrorIfArgumentsAreNotValid() {
if (!filename) {
throw new Error("You must specify a filename");
}
}
throwErrorIfArgumentsAreNotValid();
debug("Calling BaseRollingFileStream.super");
BaseRollingFileStream.super_.call(this);
this.openTheStream();
this.currentSize = currentFileSize(this.filename);
}
util.inherits(BaseRollingFileStream, stream.Writable);
BaseRollingFileStream.prototype._write = function(chunk, encoding, callback) {
var that = this;
function writeTheChunk() {
debug("writing the chunk to the underlying stream");
that.currentSize += chunk.length;
try {
that.theStream.write(chunk, encoding, callback);
}
catch (err){
debug(err);
callback();
}
}
debug("in _write");
if (this.shouldRoll()) {
this.currentSize = 0;
this.roll(this.filename, writeTheChunk);
} else {
writeTheChunk();
}
};
BaseRollingFileStream.prototype.openTheStream = function(cb) {
debug("opening the underlying stream");
this.theStream = fs.createWriteStream(this.filename, this.options);
if (cb) {
this.theStream.on("open", cb);
}
};
BaseRollingFileStream.prototype.closeTheStream = function(cb) {
debug("closing the underlying stream");
this.theStream.end(cb);
};
BaseRollingFileStream.prototype.shouldRoll = function() {
return false; // default behaviour is never to roll
};
BaseRollingFileStream.prototype.roll = function(filename, callback) {
callback(); // default behaviour is not to do anything
};

View File

@ -1,91 +0,0 @@
"use strict";
var BaseRollingFileStream = require('./BaseRollingFileStream')
, debug = require('../debug')('DateRollingFileStream')
, format = require('../date_format')
, fs = require('fs')
, util = require('util');
module.exports = DateRollingFileStream;
function findTimestampFromFileIfExists(filename, now) {
return fs.existsSync(filename) ? fs.statSync(filename).mtime : new Date(now());
}
function DateRollingFileStream(filename, pattern, options, now) {
debug("Now is " + now);
if (pattern && typeof(pattern) === 'object') {
now = options;
options = pattern;
pattern = null;
}
this.pattern = pattern || '.yyyy-MM-dd';
this.now = now || Date.now;
this.lastTimeWeWroteSomething = format.asString(
this.pattern,
findTimestampFromFileIfExists(filename, this.now)
);
this.baseFilename = filename;
this.alwaysIncludePattern = false;
if (options) {
if (options.alwaysIncludePattern) {
this.alwaysIncludePattern = true;
filename = this.baseFilename + this.lastTimeWeWroteSomething;
}
delete options.alwaysIncludePattern;
if (Object.keys(options).length === 0) {
options = null;
}
}
debug("this.now is " + this.now + ", now is " + now);
DateRollingFileStream.super_.call(this, filename, options);
}
util.inherits(DateRollingFileStream, BaseRollingFileStream);
DateRollingFileStream.prototype.shouldRoll = function() {
var lastTime = this.lastTimeWeWroteSomething,
thisTime = format.asString(this.pattern, new Date(this.now()));
debug("DateRollingFileStream.shouldRoll with now = " +
this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime);
this.lastTimeWeWroteSomething = thisTime;
this.previousTime = lastTime;
return thisTime !== lastTime;
};
DateRollingFileStream.prototype.roll = function(filename, callback) {
var that = this;
debug("Starting roll");
if (this.alwaysIncludePattern) {
this.filename = this.baseFilename + this.lastTimeWeWroteSomething;
this.closeTheStream(this.openTheStream.bind(this, callback));
} else {
var newFilename = this.baseFilename + this.previousTime;
this.closeTheStream(
deleteAnyExistingFile.bind(null,
renameTheCurrentFile.bind(null,
this.openTheStream.bind(this,
callback))));
}
function deleteAnyExistingFile(cb) {
//on windows, you can get a EEXIST error if you rename a file to an existing file
//so, we'll try to delete the file we're renaming to first
fs.unlink(newFilename, function (err) {
//ignore err: if we could not delete, it's most likely that it doesn't exist
cb();
});
}
function renameTheCurrentFile(cb) {
debug("Renaming the " + filename + " -> " + newFilename);
fs.rename(filename, newFilename, cb);
}
};

View File

@ -1,117 +0,0 @@
"use strict";
var BaseRollingFileStream = require('./BaseRollingFileStream')
, debug = require('../debug')('RollingFileStream')
, util = require('util')
, path = require('path')
, child_process = require('child_process')
, zlib = require("zlib")
, fs = require('fs');
module.exports = RollingFileStream;
function RollingFileStream (filename, size, backups, options) {
this.size = size;
this.backups = backups || 1;
function throwErrorIfArgumentsAreNotValid() {
if (!filename || !size || size <= 0) {
throw new Error("You must specify a filename and file size");
}
}
throwErrorIfArgumentsAreNotValid();
RollingFileStream.super_.call(this, filename, options);
}
util.inherits(RollingFileStream, BaseRollingFileStream);
RollingFileStream.prototype.shouldRoll = function() {
debug("should roll with current size " + this.currentSize + " and max size " + this.size);
return this.currentSize >= this.size;
};
RollingFileStream.prototype.roll = function(filename, callback) {
var that = this,
nameMatcher = new RegExp('^' + path.basename(filename));
function justTheseFiles (item) {
return nameMatcher.test(item);
}
function index(filename_) {
debug('Calculating index of '+filename_);
return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
}
function byIndex(a, b) {
if (index(a) > index(b)) {
return 1;
} else if (index(a) < index(b) ) {
return -1;
} else {
return 0;
}
}
function compress (filename, cb) {
var gzip = zlib.createGzip();
var inp = fs.createReadStream(filename);
var out = fs.createWriteStream(filename+".gz");
inp.pipe(gzip).pipe(out);
fs.unlink(filename, cb);
}
function increaseFileIndex (fileToRename, cb) {
var idx = index(fileToRename);
debug('Index of ' + fileToRename + ' is ' + idx);
if (idx < that.backups) {
var ext = path.extname(fileToRename);
var destination = filename + '.' + (idx+1);
if (that.options.compress && /^gz$/.test(ext.substring(1))) {
destination+=ext;
}
//on windows, you can get a EEXIST error if you rename a file to an existing file
//so, we'll try to delete the file we're renaming to first
fs.unlink(destination, function (err) {
//ignore err: if we could not delete, it's most likely that it doesn't exist
debug('Renaming ' + fileToRename + ' -> ' + destination);
fs.rename(path.join(path.dirname(filename), fileToRename), destination, function(err) {
if (err) {
cb(err);
} else {
if (that.options.compress && ext!=".gz") {
compress(destination, cb);
} else {
cb();
}
}
});
});
} else {
cb();
}
}
function renameTheFiles(cb) {
//roll the backups (rename file.n to file.n+1, where n <= numBackups)
debug("Renaming the old files");
fs.readdir(path.dirname(filename), function (err, files) {
var filesToProcess = files.filter(justTheseFiles).sort(byIndex);
(function processOne(err) {
var file = filesToProcess.pop();
if (!file || err) { return cb(err); }
increaseFileIndex(file, processOne);
})();
});
}
debug("Rolling, rolling, rolling");
this.closeTheStream(
renameTheFiles.bind(null,
this.openTheStream.bind(this,
callback)));
};

View File

@ -1,3 +0,0 @@
"use strict";
exports.RollingFileStream = require('./RollingFileStream');
exports.DateRollingFileStream = require('./DateRollingFileStream');

View File

@ -1,6 +1,6 @@
{
"name": "log4js",
"version": "0.6.38",
"version": "1.0.0",
"description": "Port of Log4js to work with node.",
"keywords": [
"logging",
@ -23,19 +23,20 @@
},
"scripts": {
"pretest": "jshint lib/ test/",
"test": "vows"
"test": "tape 'test/tape/**/*.js' && vows test/vows/*.js"
},
"directories": {
"test": "test",
"lib": "lib"
},
"dependencies": {
"readable-stream": "~1.0.2",
"semver": "~4.3.3"
"debug": "^2.2.0",
"streamroller": "^0.1.0"
},
"devDependencies": {
"jshint": "^2.9.2",
"sandboxed-module": "0.1.3",
"tape": "^4.6.2",
"vows": "0.7.0"
},
"browser": {

View File

@ -1,72 +0,0 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, sandbox = require('sandboxed-module')
, fakeConsole = {
error: function(format, label, message) {
this.logged = [ format, label, message ];
}
}
, globals = function(debugValue) {
return {
process: {
env: {
'NODE_DEBUG': debugValue
}
},
console: fakeConsole
};
};
vows.describe('../lib/debug').addBatch({
'when NODE_DEBUG is set to log4js': {
topic: function() {
var debug = sandbox.require(
'../lib/debug',
{ 'globals': globals('log4js') }
);
fakeConsole.logged = [];
debug('cheese')('biscuits');
return fakeConsole.logged;
},
'it should log to console.error': function(logged) {
assert.equal(logged[0], 'LOG4JS: (%s) %s');
assert.equal(logged[1], 'cheese');
assert.equal(logged[2], 'biscuits');
}
},
'when NODE_DEBUG is set to not log4js': {
topic: function() {
var debug = sandbox.require(
'../lib/debug',
{ globals: globals('other_module') }
);
fakeConsole.logged = [];
debug('cheese')('biscuits');
return fakeConsole.logged;
},
'it should not log to console.error': function(logged) {
assert.equal(logged.length, 0);
}
},
'when NODE_DEBUG is not set': {
topic: function() {
var debug = sandbox.require(
'../lib/debug',
{ globals: globals(null) }
);
fakeConsole.logged = [];
debug('cheese')('biscuits');
return fakeConsole.logged;
},
'it should not log to console.error': function(logged) {
assert.equal(logged.length, 0);
}
}
}).exportTo(module);

View File

@ -1,35 +0,0 @@
"use strict";
var assert = require('assert')
, vows = require('vows')
, layouts = require('../lib/layouts')
, sandbox = require('sandboxed-module');
vows.describe('../lib/appenders/stderr').addBatch({
'appender': {
topic: function() {
var messages = []
, fakeProcess = {
stderr: {
write: function(msg) { messages.push(msg); }
}
}
, appenderModule = sandbox.require(
'../lib/appenders/stderr',
{
globals: {
'process': fakeProcess
}
}
)
, appender = appenderModule.appender(layouts.messagePassThroughLayout);
appender({ data: ["blah"] });
return messages;
},
'should output to stderr': function(messages) {
assert.equal(messages[0], 'blah\n');
}
}
}).exportTo(module);

View File

@ -1,93 +0,0 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, fs = require('fs')
, sandbox = require('sandboxed-module');
vows.describe('../../lib/streams/BaseRollingFileStream').addBatch({
'when node version < 0.10.0': {
topic: function() {
var streamLib = sandbox.load(
'../../lib/streams/BaseRollingFileStream',
{
globals: {
process: {
version: '0.8.11'
}
},
requires: {
'readable-stream': {
Writable: function() {}
}
}
}
);
return streamLib.required;
},
'it should use readable-stream to maintain compatibility': function(required) {
assert.ok(required['readable-stream']);
assert.ok(!required.stream);
}
},
'when node version > 0.10.0': {
topic: function() {
var streamLib = sandbox.load(
'../../lib/streams/BaseRollingFileStream',
{
globals: {
process: {
version: '0.10.1'
}
},
requires: {
'stream': {
Writable: function() {}
}
}
}
);
return streamLib.required;
},
'it should use the core stream module': function(required) {
assert.ok(required.stream);
assert.ok(!required['readable-stream']);
}
},
'when no filename is passed': {
topic: require('../../lib/streams/BaseRollingFileStream'),
'it should throw an error': function(BaseRollingFileStream) {
try {
new BaseRollingFileStream();
assert.fail('should not get here');
} catch (e) {
assert.ok(e);
}
}
},
'default behaviour': {
topic: function() {
var BaseRollingFileStream = require('../../lib/streams/BaseRollingFileStream')
, stream = new BaseRollingFileStream('basetest.log');
return stream;
},
teardown: function() {
try {
fs.unlink('basetest.log');
} catch (e) {
console.error("could not remove basetest.log", e);
}
},
'it should not want to roll': function(stream) {
assert.isFalse(stream.shouldRoll());
},
'it should not roll': function(stream) {
var cbCalled = false;
//just calls the callback straight away, no async calls
stream.roll('basetest.log', function() { cbCalled = true; });
assert.isTrue(cbCalled);
}
}
}).exportTo(module);

View File

@ -1,227 +0,0 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, fs = require('fs')
, semver = require('semver')
, streams
, DateRollingFileStream
, testTime = new Date(2012, 8, 12, 10, 37, 11);
if (semver.satisfies(process.version, '>=0.10.0')) {
streams = require('stream');
} else {
streams = require('readable-stream');
}
DateRollingFileStream = require('../../lib/streams').DateRollingFileStream;
function cleanUp(filename) {
return function() {
fs.unlink(filename);
};
}
function now() {
return testTime.getTime();
}
vows.describe('DateRollingFileStream').addBatch({
'arguments': {
topic: new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-1',
'yyyy-mm-dd.hh'
),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),
'should take a filename and a pattern and return a WritableStream': function(stream) {
assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
assert.instanceOf(stream, streams.Writable);
},
'with default settings for the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 420);
assert.equal(stream.theStream.flags, 'a');
//encoding is not available on the underlying stream
//assert.equal(stream.encoding, 'utf8');
}
},
'default arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-2'),
'pattern should be .yyyy-MM-dd': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd');
}
},
'with stream arguments': {
topic: new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-3',
'yyyy-MM-dd',
{ mode: parseInt('0666', 8) }
),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, parseInt('0666', 8));
}
},
'with stream arguments but no pattern': {
topic: new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-4',
{ mode: parseInt('0666', 8) }
),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, parseInt('0666', 8));
},
'should use default pattern': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd');
}
},
'with a pattern of .yyyy-MM-dd': {
topic: function() {
var that = this,
stream = new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd',
null,
now
);
stream.write("First message\n", 'utf8', function() {
that.callback(null, stream);
});
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'),
'should create a file with the base name': {
topic: function(stream) {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
},
'file should contain first message': function(result) {
assert.equal(result.toString(), "First message\n");
}
},
'when the day changes': {
topic: function(stream) {
testTime = new Date(2012, 8, 13, 0, 10, 12);
stream.write("Second message\n", 'utf8', this.callback);
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(
files.filter(
function(file) {
return file.indexOf('test-date-rolling-file-stream-5') > -1;
}
).length,
2
);
}
},
'the file without a date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
},
'should contain the second message': function(contents) {
assert.equal(contents.toString(), "Second message\n");
}
},
'the file with the date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', this.callback);
},
'should contain the first message': function(contents) {
assert.equal(contents.toString(), "First message\n");
}
}
}
},
'with alwaysIncludePattern': {
topic: function() {
var that = this,
testTime = new Date(2012, 8, 12, 0, 10, 12),
stream = new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-pattern',
'.yyyy-MM-dd',
{alwaysIncludePattern: true},
now
);
stream.write("First message\n", 'utf8', function() {
that.callback(null, stream);
});
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12'),
'should create a file with the pattern set': {
topic: function(stream) {
fs.readFile(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', this.callback);
},
'file should contain first message': function(result) {
assert.equal(result.toString(), "First message\n");
}
},
'when the day changes': {
topic: function(stream) {
testTime = new Date(2012, 8, 13, 0, 10, 12);
stream.write("Second message\n", 'utf8', this.callback);
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13'),
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(
files.filter(
function(file) {
return file.indexOf('test-date-rolling-file-stream-pattern') > -1;
}
).length,
2
);
}
},
'the file with the later date': {
topic: function() {
fs.readFile(
__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13',
this.callback
);
},
'should contain the second message': function(contents) {
assert.equal(contents.toString(), "Second message\n");
}
},
'the file with the date': {
topic: function() {
fs.readFile(
__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12',
this.callback
);
},
'should contain the first message': function(contents) {
assert.equal(contents.toString(), "First message\n");
}
}
}
}
}).exportTo(module);

View File

@ -1,207 +0,0 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, events = require('events')
, fs = require('fs')
, semver = require('semver')
, streams
, RollingFileStream;
if (semver.satisfies(process.version, '>=0.10.0')) {
streams = require('stream');
} else {
streams = require('readable-stream');
}
RollingFileStream = require('../../lib/streams').RollingFileStream;
function remove(filename) {
try {
fs.unlinkSync(filename);
} catch (e) {
//doesn't really matter if it failed
}
}
function create(filename) {
fs.writeFileSync(filename, "test file");
}
vows.describe('RollingFileStream').addBatch({
'arguments': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream");
return new RollingFileStream("test-rolling-file-stream", 1024, 5);
},
'should take a filename, file size (bytes), no. backups, return Writable': function(stream) {
assert.instanceOf(stream, streams.Writable);
assert.equal(stream.filename, "test-rolling-file-stream");
assert.equal(stream.size, 1024);
assert.equal(stream.backups, 5);
},
'with default settings for the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 420);
assert.equal(stream.theStream.flags, 'a');
//encoding isn't a property on the underlying stream
//assert.equal(stream.theStream.encoding, 'utf8');
}
},
'with stream arguments': {
topic: function() {
remove(__dirname + '/test-rolling-file-stream');
return new RollingFileStream(
'test-rolling-file-stream',
1024,
5,
{ mode: parseInt('0666', 8) }
);
},
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, parseInt('0666', 8));
}
},
'without size': {
topic: function() {
try {
new RollingFileStream(__dirname + "/test-rolling-file-stream");
} catch (e) {
return e;
}
},
'should throw an error': function(err) {
assert.instanceOf(err, Error);
}
},
'without number of backups': {
topic: function() {
remove('test-rolling-file-stream');
return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024);
},
'should default to 1 backup': function(stream) {
assert.equal(stream.backups, 1);
}
},
'writing less than the file size': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream-write-less");
var that = this
, stream = new RollingFileStream(
__dirname + "/test-rolling-file-stream-write-less",
100
);
stream.write("cheese", "utf8", function() {
stream.end();
fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback);
});
},
'should write to the file': function(contents) {
assert.equal(contents, "cheese");
},
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be one': function(files) {
assert.equal(
files.filter(
function(file) {
return file.indexOf('test-rolling-file-stream-write-less') > -1;
}
).length,
1
);
}
}
},
'writing more than the file size': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream-write-more");
remove(__dirname + "/test-rolling-file-stream-write-more.1");
var that = this
, stream = new RollingFileStream(
__dirname + "/test-rolling-file-stream-write-more",
45
);
write7Cheese(that, stream);
},
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(files.filter(
function(file) {
return file.indexOf('test-rolling-file-stream-write-more') > -1;
}
).length, 2);
}
},
'the first file': {
topic: function() {
fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback);
},
'should contain the last two log messages': function(contents) {
assert.equal(contents, '5.cheese\n6.cheese\n');
}
},
'the second file': {
topic: function() {
fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback);
},
'should contain the first five log messages': function(contents) {
assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
}
}
},
'when many files already exist': {
topic: function() {
remove(__dirname + '/test-rolling-stream-with-existing-files.11');
remove(__dirname + '/test-rolling-stream-with-existing-files.20');
remove(__dirname + '/test-rolling-stream-with-existing-files.-1');
remove(__dirname + '/test-rolling-stream-with-existing-files.1.1');
remove(__dirname + '/test-rolling-stream-with-existing-files.1');
create(__dirname + '/test-rolling-stream-with-existing-files.11');
create(__dirname + '/test-rolling-stream-with-existing-files.20');
create(__dirname + '/test-rolling-stream-with-existing-files.-1');
create(__dirname + '/test-rolling-stream-with-existing-files.1.1');
create(__dirname + '/test-rolling-stream-with-existing-files.1');
var that = this
, stream = new RollingFileStream(
__dirname + "/test-rolling-stream-with-existing-files",
45,
5
);
write7Cheese(that, stream);
},
'the files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be rolled': function(files) {
assert.include(files, 'test-rolling-stream-with-existing-files');
assert.include(files, 'test-rolling-stream-with-existing-files.1');
assert.include(files, 'test-rolling-stream-with-existing-files.2');
assert.include(files, 'test-rolling-stream-with-existing-files.11');
assert.include(files, 'test-rolling-stream-with-existing-files.20');
}
}
}
}).exportTo(module);
function write7Cheese(that, stream) {
var streamed = 0;
[0, 1, 2, 3, 4, 5, 6].forEach(function(i) {
stream.write(i +".cheese\n", "utf8", function(e) {
streamed++;
if (e) { return that.callback(e); }
if (streamed === 7) {
stream.end();
that.callback();
}
});
});
}

View File

@ -0,0 +1,33 @@
"use strict";
var test = require('tape')
, sandbox = require('sandboxed-module');
test('default settings', function(t) {
var output = []
, log4js = sandbox.require(
'../../lib/log4js',
{
requires: {
'./appenders/stdout': {
'name': 'stdout',
'appender': function () {
return function(evt) {
output.push(evt);
};
},
'configure': function (config) {
return this.appender();
}
}
}
}
)
, logger = log4js.getLogger("default-settings");
logger.info("This should go to stdout.");
t.plan(2);
t.equal(output.length, 1, "It should log to stdout.");
t.equal(output[0].data[0], "This should go to stdout.", "It should log the message.");
t.end();
});

View File

@ -0,0 +1,22 @@
"use strict";
var test = require('tape')
, layouts = require('../../lib/layouts')
, sandbox = require('sandboxed-module');
test('stderr appender', function(t) {
var output = []
, appender = sandbox.require(
'../../lib/appenders/stderr',
{
globals: {
process: { stderr: { write : function(data) { output.push(data); } } }
}
}
).appender(layouts.messagePassThroughLayout);
appender({ data: ["biscuits"] });
t.plan(2);
t.equal(output.length, 1, 'There should be one message.');
t.equal(output[0], 'biscuits\n', 'The message should be biscuits.');
t.end();
});

View File

@ -0,0 +1,22 @@
"use strict";
var test = require('tape')
, layouts = require('../../lib/layouts')
, sandbox = require('sandboxed-module');
test('stdout appender', function(t) {
var output = []
, appender = sandbox.require(
'../../lib/appenders/stdout',
{
globals: {
process: { stdout: { write : function(data) { output.push(data); } } }
}
}
).appender(layouts.messagePassThroughLayout);
appender({ data: ["cheese"] });
t.plan(2);
t.equal(output.length, 1, 'There should be one message.');
t.equal(output[0], 'cheese\n', 'The message should be cheese.');
t.end();
});

View File

@ -17,9 +17,9 @@ vows.describe('log4js categoryFilter').addBatch({
'appender': {
topic: function() {
var log4js = require('../lib/log4js'), logEvents = [], webLogger, appLogger;
var log4js = require('../../lib/log4js'), logEvents = [], webLogger, appLogger;
log4js.clearAppenders();
var appender = require('../lib/appenders/categoryFilter')
var appender = require('../../lib/appenders/categoryFilter')
.appender(
['app'],
function(evt) { logEvents.push(evt); }
@ -45,13 +45,13 @@ vows.describe('log4js categoryFilter').addBatch({
'configure': {
topic: function() {
var log4js = require('../lib/log4js')
var log4js = require('../../lib/log4js')
, logger, weblogger;
remove(__dirname + '/categoryFilter-web.log');
remove(__dirname + '/categoryFilter-noweb.log');
log4js.configure('test/with-categoryFilter.json');
log4js.configure('test/vows/with-categoryFilter.json');
logger = log4js.getLogger("app");
weblogger = log4js.getLogger("web");

View File

@ -1,9 +1,9 @@
"use strict";
var assert = require('assert');
var vows = require('vows');
var layouts = require('../lib/layouts');
var layouts = require('../../lib/layouts');
var sandbox = require('sandboxed-module');
var LoggingEvent = require('../lib/logger').LoggingEvent;
var LoggingEvent = require('../../lib/logger').LoggingEvent;
var cluster = require('cluster');
vows.describe('log4js cluster appender').addBatch({
@ -42,7 +42,7 @@ vows.describe('log4js cluster appender').addBatch({
};
// Load appender and fake modules in it
var appenderModule = sandbox.require('../lib/appenders/clustered', {
var appenderModule = sandbox.require('../../lib/appenders/clustered', {
requires: {
'cluster': fakeCluster,
}
@ -119,7 +119,7 @@ vows.describe('log4js cluster appender').addBatch({
};
// Load appender and fake modules in it
var appenderModule = sandbox.require('../lib/appenders/clustered', {
var appenderModule = sandbox.require('../../lib/appenders/clustered', {
requires: {
'cluster': fakeCluster,
},

View File

@ -24,7 +24,7 @@ vows.describe('log4js configure').addBatch({
topic: function() {
var testAppender = makeTestAppender(),
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'./appenders/cheese': testAppender
@ -55,14 +55,14 @@ vows.describe('log4js configure').addBatch({
topic: function() {
var testAppender = makeTestAppender(),
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{ requires: { './appenders/cheese': testAppender } }
);
log4js.loadAppender('cheese');
return log4js;
},
'should load appender from ../lib/appenders': function(log4js) {
'should load appender from ../../lib/appenders': function(log4js) {
assert.ok(log4js.appenders.cheese);
},
'should add appender configure function to appenderMakers' : function(log4js) {
@ -73,7 +73,7 @@ vows.describe('log4js configure').addBatch({
topic: function() {
var testAppender = makeTestAppender(),
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{ requires: { 'some/other/external': testAppender } }
);
log4js.loadAppender('some/other/external');
@ -89,7 +89,7 @@ vows.describe('log4js configure').addBatch({
'when appender object loaded via loadAppender': {
topic: function() {
var testAppender = makeTestAppender(),
log4js = sandbox.require('../lib/log4js');
log4js = sandbox.require('../../lib/log4js');
log4js.loadAppender('some/other/external', testAppender);
return log4js;
@ -131,7 +131,7 @@ vows.describe('log4js configure').addBatch({
}
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'fs': fakeFS,

View File

@ -10,7 +10,7 @@
// Basic set up
var vows = require('vows');
var assert = require('assert');
var toLevel = require('../lib/levels').toLevel;
var toLevel = require('../../lib/levels').toLevel;
// uncomment one or other of the following to see progress (or not) while running the tests
// var showProgress = console.log;
@ -47,7 +47,7 @@ function getLoggerName(level) {
function getTopLevelContext(nop, configToTest, name) {
return {
topic: function() {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
// create loggers for each level,
// keeping the level in the logger's name for traceability
strLevels.forEach(function(l) {

View File

@ -4,7 +4,7 @@ var vows = require('vows')
, assert = require('assert')
, util = require('util')
, EE = require('events').EventEmitter
, levels = require('../lib/levels');
, levels = require('../../lib/levels');
function MockLogger() {
@ -63,7 +63,7 @@ function request(cl, method, url, code, reqHeaders, resHeaders) {
vows.describe('log4js connect logger').addBatch({
'getConnectLoggerModule': {
topic: function() {
var clm = require('../lib/connect-logger');
var clm = require('../../lib/connect-logger');
return clm;
},

View File

@ -1,10 +1,10 @@
"use strict";
var assert = require('assert')
, vows = require('vows')
, layouts = require('../lib/layouts')
, layouts = require('../../lib/layouts')
, sandbox = require('sandboxed-module');
vows.describe('../lib/appenders/console').addBatch({
vows.describe('../../lib/appenders/console').addBatch({
'appender': {
topic: function() {
var messages = []
@ -12,7 +12,7 @@ vows.describe('../lib/appenders/console').addBatch({
log: function(msg) { messages.push(msg); }
}
, appenderModule = sandbox.require(
'../lib/appenders/console',
'../../lib/appenders/console',
{
globals: {
'console': fakeConsole
@ -29,5 +29,5 @@ vows.describe('../lib/appenders/console').addBatch({
assert.equal(messages[0], 'blah');
}
}
}).exportTo(module);

View File

@ -4,7 +4,7 @@ var vows = require('vows')
, path = require('path')
, fs = require('fs')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, log4js = require('../../lib/log4js')
, EOL = require('os').EOL || '\n';
function removeFile(filename) {
@ -17,20 +17,20 @@ function removeFile(filename) {
};
}
vows.describe('../lib/appenders/dateFile').addBatch({
vows.describe('../../lib/appenders/dateFile').addBatch({
'appender': {
'adding multiple dateFileAppenders': {
topic: function () {
var listenersCount = process.listeners('exit').length,
dateFileAppender = require('../lib/appenders/dateFile'),
dateFileAppender = require('../../lib/appenders/dateFile'),
count = 5,
logfile;
while (count--) {
logfile = path.join(__dirname, 'datefa-default-test' + count + '.log');
log4js.addAppender(dateFileAppender.appender(logfile));
}
return listenersCount;
},
teardown: function() {
@ -40,7 +40,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({
removeFile('datefa-default-test3.log')();
removeFile('datefa-default-test4.log')();
},
'should only add one `exit` listener': function (initialCount) {
assert.equal(process.listeners('exit').length, initialCount + 1);
},
@ -52,7 +52,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({
var exitListener
, openedFiles = []
, dateFileAppender = sandbox.require(
'../lib/appenders/dateFile',
'../../lib/appenders/dateFile',
{
globals: {
process: {
@ -62,7 +62,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({
}
},
requires: {
'../streams': {
'streamroller': {
DateRollingFileStream: function(filename) {
openedFiles.push(filename);
@ -71,7 +71,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({
};
}
}
}
}
}
);
for (var i=0; i < 5; i += 1) {
@ -85,55 +85,55 @@ vows.describe('../lib/appenders/dateFile').addBatch({
assert.isEmpty(openedFiles);
}
},
'with default settings': {
topic: function() {
var that = this,
testFile = path.join(__dirname, 'date-appender-default.log'),
appender = require('../lib/appenders/dateFile').appender(testFile),
appender = require('../../lib/appenders/dateFile').appender(testFile),
logger = log4js.getLogger('default-settings');
log4js.clearAppenders();
log4js.addAppender(appender, 'default-settings');
logger.info("This should be in the file.");
setTimeout(function() {
fs.readFile(testFile, "utf8", that.callback);
}, 100);
},
teardown: removeFile('date-appender-default.log'),
'should write to the file': function(contents) {
assert.include(contents, 'This should be in the file');
},
'should use the basic layout': function(contents) {
assert.match(
contents,
contents,
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
);
}
}
}
}).addBatch({
'configure': {
'with dateFileAppender': {
topic: function() {
var log4js = require('../lib/log4js')
var log4js = require('../../lib/log4js')
, logger;
//this config file defines one file appender (to ./date-file-test.log)
//and sets the log level for "tests" to WARN
log4js.configure('test/with-dateFile.json');
log4js.configure('test/vows/with-dateFile.json');
logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', this.callback);
},
teardown: removeFile('date-file-test.log'),
'should load appender configuration from a json file': function(err, contents) {
if (err) {
throw err;
@ -145,26 +145,26 @@ vows.describe('../lib/appenders/dateFile').addBatch({
'with options.alwaysIncludePattern': {
topic: function() {
var self = this
, log4js = require('../lib/log4js')
, format = require('../lib/date_format')
, log4js = require('../../lib/log4js')
, format = require('../../lib/date_format')
, logger
, options = {
"appenders": [
{
"category": "tests",
"type": "dateFile",
"filename": "test/date-file-test",
"category": "tests",
"type": "dateFile",
"filename": "test/vows/date-file-test",
"pattern": "-from-MM-dd.log",
"alwaysIncludePattern": true,
"layout": {
"type": "messagePassThrough"
"layout": {
"type": "messagePassThrough"
}
}
]
}
, thisTime = format.asString(options.appenders[0].pattern, new Date());
fs.writeFileSync(
path.join(__dirname, 'date-file-test' + thisTime),
path.join(__dirname, 'date-file-test' + thisTime),
"this is existing data" + EOL,
'utf8'
);
@ -189,10 +189,10 @@ vows.describe('../lib/appenders/dateFile').addBatch({
topic: function () {
var fileOpened,
appender = sandbox.require(
'../lib/appenders/dateFile',
'../../lib/appenders/dateFile',
{ requires:
{ '../streams':
{ DateRollingFileStream:
{ 'streamroller':
{ DateRollingFileStream:
function(file) {
fileOpened = file;
return {
@ -205,10 +205,10 @@ vows.describe('../lib/appenders/dateFile').addBatch({
}
);
appender.configure(
{
filename: "whatever.log",
maxLogSize: 10
},
{
filename: "whatever.log",
maxLogSize: 10
},
{ cwd: '/absolute/path/to' }
);
return fileOpened;
@ -218,6 +218,6 @@ vows.describe('../lib/appenders/dateFile').addBatch({
assert.equal(fileOpened, expected);
}
}
}
}).exportTo(module);

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, dateFormat = require('../lib/date_format');
, dateFormat = require('../../lib/date_format');
function createFixedDate() {
return new Date(2010, 0, 11, 14, 31, 30, 5);

View File

@ -3,7 +3,7 @@ var vows = require('vows')
, fs = require('fs')
, path = require('path')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, log4js = require('../../lib/log4js')
, assert = require('assert')
, zlib = require('zlib')
, EOL = require('os').EOL || '\n';
@ -27,7 +27,10 @@ vows.describe('log4js fileAppender').addBatch({
while (count--) {
logfile = path.join(__dirname, '/fa-default-test' + count + '.log');
log4js.addAppender(require('../lib/appenders/file').appender(logfile), 'default-settings');
log4js.addAppender(
require('../../lib/appenders/file').appender(logfile),
'default-settings'
);
}
return listenersCount;
@ -43,7 +46,7 @@ vows.describe('log4js fileAppender').addBatch({
var exitListener
, openedFiles = []
, fileAppender = sandbox.require(
'../lib/appenders/file',
'../../lib/appenders/file',
{
globals: {
process: {
@ -53,7 +56,7 @@ vows.describe('log4js fileAppender').addBatch({
}
},
requires: {
'../streams': {
'streamroller': {
RollingFileStream: function(filename) {
openedFiles.push(filename);
@ -82,12 +85,15 @@ vows.describe('log4js fileAppender').addBatch({
'with default fileAppender settings': {
topic: function() {
var that = this
, testFile = path.join(__dirname, '/fa-default-test.log')
, testFile = path.join(__dirname, 'fa-default-test.log')
, logger = log4js.getLogger('default-settings');
remove(testFile);
log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings');
log4js.addAppender(
require('../../lib/appenders/file').appender(testFile),
'default-settings'
);
logger.info("This should be in the file.");
@ -114,10 +120,10 @@ vows.describe('log4js fileAppender').addBatch({
function addAppender(cat) {
var testFile = path.join(
__dirname,
'/fa-subcategories-test-'+cat.join('-').replace(/\./g, "_")+'.log'
'fa-subcategories-test-'+cat.join('-').replace(/\./g, "_")+'.log'
);
remove(testFile);
log4js.addAppender(require('../lib/appenders/file').appender(testFile), cat);
log4js.addAppender(require('../../lib/appenders/file').appender(testFile), cat);
return testFile;
}
@ -198,7 +204,7 @@ vows.describe('log4js fileAppender').addBatch({
//log file of 100 bytes maximum, no backups
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
require('../../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
'max-file-size'
);
logger.info("This is the first log message.");
@ -237,7 +243,7 @@ vows.describe('log4js fileAppender').addBatch({
//log file of 50 bytes maximum, 2 backups
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
require('../../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
'max-file-size-backups'
);
logger.info("This is the first log message.");
@ -310,7 +316,7 @@ vows.describe('log4js fileAppender').addBatch({
//log file of 50 bytes maximum, 2 backups
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/file').appender(
require('../../lib/appenders/file').appender(
testFile, log4js.layouts.basicLayout, 50, 2, true
),
'max-file-size-backups'
@ -380,11 +386,11 @@ vows.describe('log4js fileAppender').addBatch({
'configure' : {
'with fileAppender': {
topic: function() {
var log4js = require('../lib/log4js')
var log4js = require('../../lib/log4js')
, logger;
//this config file defines one file appender (to ./tmp-tests.log)
//and sets the log level for "tests" to WARN
log4js.configure('./test/log4js.json');
log4js.configure('./test/vows/log4js.json');
logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
@ -403,7 +409,7 @@ vows.describe('log4js fileAppender').addBatch({
var consoleArgs
, errorHandler
, fileAppender = sandbox.require(
'../lib/appenders/file',
'../../lib/appenders/file',
{
globals: {
console: {
@ -413,7 +419,7 @@ vows.describe('log4js fileAppender').addBatch({
}
},
requires: {
'../streams': {
'streamroller': {
RollingFileStream: function(filename) {
this.end = function() {};

View File

@ -3,7 +3,7 @@ var vows = require('vows')
, fs = require('fs')
, path = require('path')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, log4js = require('../../lib/log4js')
, assert = require('assert')
, EOL = require('os').EOL || '\n';
@ -27,7 +27,7 @@ vows.describe('log4js fileSyncAppender').addBatch({
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/fileSync').appender(testFile),
require('../../lib/appenders/fileSync').appender(testFile),
'default-settings'
);
@ -55,7 +55,14 @@ vows.describe('log4js fileSyncAppender').addBatch({
//log file of 100 bytes maximum, no backups
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 100, 0),
require(
'../../lib/appenders/fileSync'
).appender(
testFile,
log4js.layouts.basicLayout,
100,
0
),
'max-file-size'
);
logger.info("This is the first log message.");
@ -92,7 +99,12 @@ vows.describe('log4js fileSyncAppender').addBatch({
//log file of 50 bytes maximum, 2 backups
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/fileSync').appender(testFile, log4js.layouts.basicLayout, 50, 2),
require('../../lib/appenders/fileSync').appender(
testFile,
log4js.layouts.basicLayout,
50,
2
),
'max-file-size-backups'
);
logger.info("This is the first log message.");
@ -156,7 +168,7 @@ vows.describe('log4js fileSyncAppender').addBatch({
'configure' : {
'with fileSyncAppender': {
topic: function() {
var log4js = require('../lib/log4js')
var log4js = require('../../lib/log4js')
, logger;
//this config defines one file appender (to ./tmp-sync-tests.log)
//and sets the log level for "tests" to WARN

View File

@ -2,8 +2,8 @@
var vows = require('vows')
, assert = require('assert')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, realLayouts = require('../lib/layouts')
, log4js = require('../../lib/log4js')
, realLayouts = require('../../lib/layouts')
, setupLogging = function(options, category, compressedLength) {
var fakeDgram = {
sent: false,
@ -56,7 +56,7 @@ var vows = require('vows')
},
messagePassThroughLayout: realLayouts.messagePassThroughLayout
}
, appender = sandbox.require('../lib/appenders/gelf', {
, appender = sandbox.require('../../lib/appenders/gelf', {
requires: {
dgram: fakeDgram,
zlib: fakeZlib,

View File

@ -5,7 +5,7 @@ var vows = require('vows')
vows.describe('log4js global loglevel').addBatch({
'global loglevel' : {
topic: function() {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
return log4js;
},

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows'),
assert = require('assert'),
log4js = require('../lib/log4js'),
log4js = require('../../lib/log4js'),
sandbox = require('sandboxed-module');
function setupLogging(category, options) {
@ -34,7 +34,7 @@ function setupLogging(category, options) {
}
};
var hipchatModule = sandbox.require('../lib/appenders/hipchat', {
var hipchatModule = sandbox.require('../../lib/appenders/hipchat', {
requires: {
'hipchat-notifier': fakeHipchatNotifier
}

View File

@ -17,7 +17,7 @@ function test(args, pattern, value) {
vows.describe('log4js layouts').addBatch({
'colouredLayout': {
topic: function() {
return require('../lib/layouts').colouredLayout;
return require('../../lib/layouts').colouredLayout;
},
'should apply level colour codes to output': function(layout) {
@ -46,7 +46,7 @@ vows.describe('log4js layouts').addBatch({
'messagePassThroughLayout': {
topic: function() {
return require('../lib/layouts').messagePassThroughLayout;
return require('../../lib/layouts').messagePassThroughLayout;
},
'should take a logevent and output only the message' : function(layout) {
assert.equal(layout({
@ -82,16 +82,24 @@ vows.describe('log4js layouts').addBatch({
}), "{ thing: 1 }");
},
'should print the stacks of a passed error objects': function(layout) {
assert.isArray(layout({
data: [ new Error() ],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString: function() { return "ERROR"; }
}
}).match(/Error\s+at Object\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s+at runTest/)
, 'regexp did not return a match');
assert.isArray(
layout({
data: [ new Error() ],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString: function() { return "ERROR"; }
}
}).match(
new RegExp('' +
/Error\s+at Object\..*\s+/.source +
/\((.*)test[\\\/]vows[\\\/]layouts-test\.js/.source +
/\:\d+\:\d+\)\s+at runTest/.source
)
),
'regexp did not return a match'
);
},
'with passed augmented errors': {
topic: function(layout){
@ -127,7 +135,7 @@ vows.describe('log4js layouts').addBatch({
'basicLayout': {
topic: function() {
var layout = require('../lib/layouts').basicLayout,
var layout = require('../../lib/layouts').basicLayout,
event = {
data: ['this is a test'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
@ -196,7 +204,7 @@ vows.describe('log4js layouts').addBatch({
level: {
toString: function() { return "DEBUG"; }
}
}, layout = require('../lib/layouts').patternLayout
}, layout = require('../../lib/layouts').patternLayout
, tokens = {
testString: 'testStringToken',
testFunction: function() { return 'testFunctionToken'; },
@ -304,7 +312,7 @@ vows.describe('log4js layouts').addBatch({
}
},
'layout makers': {
topic: require('../lib/layouts'),
topic: require('../../lib/layouts'),
'should have a maker for each layout': function(layouts) {
assert.ok(layouts.layout("messagePassThrough"));
assert.ok(layouts.layout("basic"));
@ -314,7 +322,7 @@ vows.describe('log4js layouts').addBatch({
}
},
'add layout': {
topic: require('../lib/layouts'),
topic: require('../../lib/layouts'),
'should be able to add a layout': function(layouts) {
layouts.addLayout('test_layout', function(config){
assert.equal(config, 'test_config');

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, levels = require('../lib/levels');
, levels = require('../../lib/levels');
function assertThat(level) {
function assertForEach(assertion, test, otherLevels) {

View File

@ -9,7 +9,7 @@ vows.describe('log4js-abspath').addBatch({
topic: function() {
var appenderOptions,
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{ requires:
{ './appenders/fake':
{ name: "fake",
@ -30,7 +30,7 @@ vows.describe('log4js-abspath').addBatch({
}
]
};
log4js.configure(config, {
cwd: '/absolute/path/to'
});
@ -45,10 +45,10 @@ vows.describe('log4js-abspath').addBatch({
topic: function() {
var fileOpened,
fileAppender = sandbox.require(
'../lib/appenders/file',
'../../lib/appenders/file',
{ requires:
{ '../streams':
{ RollingFileStream:
{ 'streamroller':
{ RollingFileStream:
function(file) {
fileOpened = file;
return {
@ -61,10 +61,10 @@ vows.describe('log4js-abspath').addBatch({
}
);
fileAppender.configure(
{
filename: "whatever.log",
maxLogSize: 10
},
{
filename: "whatever.log",
maxLogSize: 10
},
{ cwd: '/absolute/path/to' }
);
return fileOpened;

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows'),
assert = require('assert'),
log4js = require('../lib/log4js'),
log4js = require('../../lib/log4js'),
sandbox = require('sandboxed-module');
function setupLogging(category, options) {
@ -23,7 +23,7 @@ function setupLogging(category, options) {
}
};
var lfsModule = sandbox.require('../lib/appenders/logFacesAppender', {
var lfsModule = sandbox.require('../../lib/appenders/logFacesAppender', {
requires: {
'dgram': fakeDgram
}

View File

@ -16,10 +16,10 @@ function remove(filename) {
vows.describe('log4js logLevelFilter').addBatch({
'appender': {
topic: function() {
var log4js = require('../lib/log4js'), logEvents = [], logger;
var log4js = require('../../lib/log4js'), logEvents = [], logger;
log4js.clearAppenders();
log4js.addAppender(
require('../lib/appenders/logLevelFilter')
require('../../lib/appenders/logLevelFilter')
.appender(
'ERROR',
undefined,
@ -44,14 +44,14 @@ vows.describe('log4js logLevelFilter').addBatch({
'configure': {
topic: function() {
var log4js = require('../lib/log4js')
var log4js = require('../../lib/log4js')
, logger;
remove(__dirname + '/logLevelFilter.log');
remove(__dirname + '/logLevelFilter-warnings.log');
remove(__dirname + '/logLevelFilter-debugs.log');
log4js.configure('test/with-logLevelFilter.json');
log4js.configure('test/vows/with-logLevelFilter.json');
logger = log4js.getLogger("tests");
logger.debug('debug');
logger.info('info');

View File

@ -1,11 +1,11 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, levels = require('../lib/levels')
, loggerModule = require('../lib/logger')
, levels = require('../../lib/levels')
, loggerModule = require('../../lib/logger')
, Logger = loggerModule.Logger;
vows.describe('../lib/logger').addBatch({
vows.describe('../../lib/logger').addBatch({
'constructor with no parameters': {
topic: new Logger(),
'should use default category': function(logger) {

View File

@ -15,7 +15,7 @@ function setupConsoleTest() {
});
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
globals: {
console: fakeConsole
@ -35,7 +35,7 @@ vows.describe('log4js').addBatch({
'getBufferedLogger': {
topic: function () {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
log4js.clearAppenders();
var logger = log4js.getBufferedLogger('tests');
return logger;
@ -54,7 +54,7 @@ vows.describe('log4js').addBatch({
'cache events': {
topic: function () {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
log4js.clearAppenders();
var logger = log4js.getBufferedLogger('tests1');
var events = [];
@ -78,7 +78,7 @@ vows.describe('log4js').addBatch({
'log events after flush() is called': {
topic: function () {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
log4js.clearAppenders();
var logger = log4js.getBufferedLogger('tests2');
logger.target.setLevel("TRACE");
@ -106,7 +106,7 @@ vows.describe('log4js').addBatch({
'getLogger': {
topic: function() {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
log4js.clearAppenders();
var logger = log4js.getLogger('tests');
logger.setLevel("DEBUG");
@ -162,7 +162,7 @@ vows.describe('log4js').addBatch({
shutdownCallbackCalled: false
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'./appenders/file':
@ -194,7 +194,7 @@ vows.describe('log4js').addBatch({
events.shutdownCallbackCalled = true;
// Re-enable log writing so other tests that use logger are not
// affected.
require('../lib/logger').enableAllLogWrites();
require('../../lib/logger').enableAllLogWrites();
callback(null, events);
});
},
@ -220,7 +220,7 @@ vows.describe('log4js').addBatch({
topic: function() {
var appenderConfig,
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'./appenders/file':
@ -254,7 +254,7 @@ vows.describe('log4js').addBatch({
'configuration that causes an error': {
topic: function() {
var log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'./appenders/file':
@ -292,7 +292,7 @@ vows.describe('log4js').addBatch({
var appenderConfig,
configFilename,
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{ requires:
{ 'fs':
{ statSync:
@ -353,24 +353,24 @@ vows.describe('log4js').addBatch({
}
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'./appenders/console': fakeConsoleAppender
'./appenders/stdout': fakeConsoleAppender
}
}
);
logger = log4js.getLogger("some-logger");
logger.debug("This is a test");
},
'should default to the console appender': function(evt) {
'should default to the stdout appender': function(evt) {
assert.equal(evt.data[0], "This is a test");
}
},
'addAppender' : {
topic: function() {
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
log4js.clearAppenders();
return log4js;
},
@ -487,10 +487,10 @@ vows.describe('log4js').addBatch({
log: function() { }
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'./appenders/console': fakeConsole
'./appenders/stdout': fakeConsole
},
globals: {
console: globalConsole
@ -505,7 +505,7 @@ vows.describe('log4js').addBatch({
return appenderEvents;
},
'should configure a console appender': function(appenderEvents) {
'should configure a stdout appender': function(appenderEvents) {
assert.equal(appenderEvents[0].data[0], 'this is a test');
},
@ -607,13 +607,13 @@ vows.describe('log4js').addBatch({
'configuration persistence' : {
topic: function() {
var logEvent,
firstLog4js = require('../lib/log4js'),
firstLog4js = require('../../lib/log4js'),
secondLog4js;
firstLog4js.clearAppenders();
firstLog4js.addAppender(function(evt) { logEvent = evt; });
secondLog4js = require('../lib/log4js');
secondLog4js = require('../../lib/log4js');
secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js");
return logEvent;
@ -625,7 +625,7 @@ vows.describe('log4js').addBatch({
'getDefaultLogger': {
topic: function() {
return require('../lib/log4js').getDefaultLogger();
return require('../../lib/log4js').getDefaultLogger();
},
'should return a logger': function(logger) {
assert.ok(logger.info);
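
(Tying the 'should default to the stdout appender' assertion earlier in this file to getDefaultLogger(): a minimal sketch of the new out-of-the-box behaviour, with no configure() call at all.)

"use strict";
var log4js = require('../../lib/log4js');

// no configuration: the built-in default config now routes events through
// the stdout appender, so both lines below are written to process.stdout
log4js.getLogger('some-logger').debug('This is a test');
log4js.getDefaultLogger().info('default logger also works');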

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, log4js = require('../lib/log4js')
, log4js = require('../../lib/log4js')
, sandbox = require('sandboxed-module')
;
@ -39,7 +39,7 @@ function setupLogging(category, options) {
}
};
var logglyModule = sandbox.require('../lib/appenders/loggly', {
var logglyModule = sandbox.require('../../lib/appenders/loggly', {
requires: {
'loggly': fakeLoggly,
'../layouts': fakeLayouts

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, log4js = require('../lib/log4js')
, log4js = require('../../lib/log4js')
, sandbox = require('sandboxed-module')
;
@ -24,7 +24,7 @@ function setupLogging(category, options) {
}
};
var logstashModule = sandbox.require('../lib/appenders/logstashUDP', {
var logstashModule = sandbox.require('../../lib/appenders/logstashUDP', {
requires: {
'dgram': fakeDgram
}

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows');
var assert = require('assert');
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
var sandbox = require('sandboxed-module');
function setupLogging(category, options) {
@ -48,7 +48,7 @@ function setupLogging(category, options) {
};
var mailgunModule = sandbox.require('../lib/appenders/mailgun', {
var mailgunModule = sandbox.require('../../lib/appenders/mailgun', {
requires: {
'mailgun-js': fakeMailgun,
'../layouts': fakeLayouts

View File

@ -59,7 +59,7 @@ vows.describe('Multiprocess Appender').addBatch({
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
'../../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
@ -118,7 +118,7 @@ vows.describe('Multiprocess Appender').addBatch({
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
'../../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
@ -153,7 +153,7 @@ vows.describe('Multiprocess Appender').addBatch({
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
'../../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
@ -172,7 +172,7 @@ vows.describe('Multiprocess Appender').addBatch({
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
'../../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
@ -252,7 +252,7 @@ vows.describe('Multiprocess Appender').addBatch({
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
'../../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
@ -273,7 +273,7 @@ vows.describe('Multiprocess Appender').addBatch({
var results = {}
, fakeNet = makeFakeNet()
, appender = sandbox.require(
'../lib/appenders/multiprocess',
'../../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet,

View File

@ -1,12 +1,12 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, Level = require('../lib/levels')
, log4js = require('../lib/log4js')
, loggerModule = require('../lib/logger')
, Level = require('../../lib/levels')
, log4js = require('../../lib/log4js')
, loggerModule = require('../../lib/logger')
, Logger = loggerModule.Logger;
vows.describe('../lib/logger').addBatch({
vows.describe('../../lib/logger').addBatch({
'creating a new log level': {
topic: function () {
Level.forName("DIAG", 6000);

View File

@ -3,7 +3,7 @@ var vows = require('vows')
, assert = require('assert')
, util = require('util')
, EE = require('events').EventEmitter
, levels = require('../lib/levels');
, levels = require('../../lib/levels');
function MockLogger() {
@ -45,7 +45,7 @@ util.inherits(MockResponse, EE);
vows.describe('log4js connect logger').addBatch({
'getConnectLoggerModule': {
topic: function() {
var clm = require('../lib/connect-logger');
var clm = require('../../lib/connect-logger');
return clm;
},

View File

@ -15,7 +15,7 @@ function setupConsoleTest() {
});
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
globals: {
console: fakeConsole
@ -75,7 +75,7 @@ vows.describe('reload configuration').addBatch({
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'fs': fakeFS,
@ -113,7 +113,7 @@ vows.describe('reload configuration').addBatch({
fileRead = 0,
logEvents = [],
logger,
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
modulePath = require('path').normalize(__dirname + '/../../lib/log4js.json'),
mtime = new Date(),
fakeFS = {
config: {
@ -152,7 +152,7 @@ vows.describe('reload configuration').addBatch({
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'fs': fakeFS,
@ -193,7 +193,7 @@ vows.describe('reload configuration').addBatch({
fileRead = 0,
logEvents = [],
logger,
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
modulePath = require('path').normalize(__dirname + '/../../lib/log4js.json'),
mtime = new Date(),
fakeFS = {
config: {
@ -230,7 +230,7 @@ vows.describe('reload configuration').addBatch({
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'fs': fakeFS,
@ -284,7 +284,7 @@ vows.describe('reload configuration').addBatch({
'when called twice with reload options': {
topic: function() {
var modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
var modulePath = require('path').normalize(__dirname + '/../../lib/log4js.json'),
fakeFS = {
readFileSync: function (file, encoding) {
return JSON.stringify({});
@ -310,7 +310,7 @@ vows.describe('reload configuration').addBatch({
return 1234;
},
log4js = sandbox.require(
'../lib/log4js',
'../../lib/log4js',
{
requires: {
'fs': fakeFS,

View File

@ -10,7 +10,7 @@
// Basic set up
var vows = require('vows');
var assert = require('assert');
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
var logger = log4js.getLogger('test-setLevel-asymmetry');
// uncomment one or other of the following to see progress (or not) while running the tests

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows');
var assert = require('assert');
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
var sandbox = require('sandboxed-module');
function setupLogging(category, options) {
@ -51,7 +51,7 @@ function setupLogging(category, options) {
};
var slackModule = sandbox.require('../lib/appenders/slack', {
var slackModule = sandbox.require('../../lib/appenders/slack', {
requires: {
'slack-node': fakeSlack,
'../layouts': fakeLayouts

View File

@ -1,7 +1,7 @@
"use strict";
var vows = require('vows');
var assert = require('assert');
var log4js = require('../lib/log4js');
var log4js = require('../../lib/log4js');
var sandbox = require('sandboxed-module');
function setupLogging(category, options) {
@ -41,7 +41,7 @@ function setupLogging(category, options) {
var fakeTransportPlugin = function () {
};
var smtpModule = sandbox.require('../lib/appenders/smtp', {
var smtpModule = sandbox.require('../../lib/appenders/smtp', {
requires: {
'nodemailer': fakeMailer,
'nodemailer-sendmail-transport': fakeTransportPlugin,

View File

@ -2,8 +2,8 @@
var assert = require('assert')
, vows = require('vows')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, levels = require('../lib/levels');
, log4js = require('../../lib/log4js')
, levels = require('../../lib/levels');
vows.describe('subcategories').addBatch({
'loggers created after levels configuration is loaded': {

View File

@ -0,0 +1,23 @@
{
"appenders": [
{
"type": "categoryFilter",
"exclude": "web",
"appender": {
"type": "file",
"filename": "test/vows/categoryFilter-noweb.log",
"layout": {
"type": "messagePassThrough"
}
}
},
{
"category": "web",
"type": "file",
"filename": "test/vows/categoryFilter-web.log",
"layout": {
"type": "messagePassThrough"
}
}
]
}
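
(A rough sketch of how this relocated config might be exercised; the config filename below is hypothetical, and the routing follows from the "exclude": "web" setting above.)

"use strict";
var log4js = require('../../lib/log4js');

// hypothetical name for the config file shown above
log4js.configure('test/vows/with-categoryFilter.json');

// 'web' events are excluded by the categoryFilter appender, so they only hit
// the second appender and end up in test/vows/categoryFilter-web.log;
// every other category is written to test/vows/categoryFilter-noweb.log
log4js.getLogger('web').info('GET /index.html');
log4js.getLogger('app').info('this should end up in the noweb log');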

View File

@ -0,0 +1,17 @@
{
"appenders": [
{
"category": "tests",
"type": "dateFile",
"filename": "test/vows/date-file-test.log",
"pattern": "-from-MM-dd",
"layout": {
"type": "messagePassThrough"
}
}
],
"levels": {
"tests": "WARN"
}
}

View File

@ -1,15 +1,15 @@
{
"appenders": [
{
"category": "tests",
{
"category": "tests",
"type": "logLevelFilter",
"level": "WARN",
"appender": {
"type": "file",
"filename": "test/logLevelFilter-warnings.log",
"layout": {
"type": "messagePassThrough"
}
"filename": "test/vows/logLevelFilter-warnings.log",
"layout": {
"type": "messagePassThrough"
}
}
},
{
@ -19,22 +19,22 @@
"maxLevel": "DEBUG",
"appender": {
"type": "file",
"filename": "test/logLevelFilter-debugs.log",
"filename": "test/vows/logLevelFilter-debugs.log",
"layout": {
"type": "messagePassThrough"
}
}
},
{
"category": "tests",
{
"category": "tests",
"type": "file",
"filename": "test/logLevelFilter.log",
"layout": {
"type": "messagePassThrough"
}
"filename": "test/vows/logLevelFilter.log",
"layout": {
"type": "messagePassThrough"
}
}
],
"levels": {
"tests": "TRACE"
}
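
(This is the config that the 'configure' topic earlier loads as test/vows/with-logLevelFilter.json; a minimal sketch of the round trip it sets up, with the routing implied by the filters above.)

"use strict";
var log4js = require('../../lib/log4js');

log4js.configure('test/vows/with-logLevelFilter.json');
var logger = log4js.getLogger('tests');

logger.debug('debug'); // within maxLevel DEBUG -> test/vows/logLevelFilter-debugs.log
logger.info('info');   // filtered out of both wrapped appenders
logger.error('error'); // at or above WARN -> test/vows/logLevelFilter-warnings.log
// all three events also land in the unfiltered test/vows/logLevelFilter.log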

View File

@ -1,23 +0,0 @@
{
"appenders": [
{
"type": "categoryFilter",
"exclude": "web",
"appender": {
"type": "file",
"filename": "test/categoryFilter-noweb.log",
"layout": {
"type": "messagePassThrough"
}
}
},
{
"category": "web",
"type": "file",
"filename": "test/categoryFilter-web.log",
"layout": {
"type": "messagePassThrough"
}
}
]
}

View File

@ -1,17 +0,0 @@
{
"appenders": [
{
"category": "tests",
"type": "dateFile",
"filename": "test/date-file-test.log",
"pattern": "-from-MM-dd",
"layout": {
"type": "messagePassThrough"
}
}
],
"levels": {
"tests": "WARN"
}
}