Merge pull request #1200 from log4js-node/update-test

chore(test): improve test coverage
This commit is contained in:
Lam Wei Li 2022-03-08 17:52:45 +08:00 committed by GitHub
commit f8a5b5a226
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 304 additions and 31 deletions

View File

@ -51,19 +51,15 @@ function touchFile(file, options) {
}
class RollingFileSync {
constructor(filename, size, backups, options) {
constructor(filename, maxLogSize, backups, options) {
debug('In RollingFileStream');
function throwErrorIfArgumentsAreNotValid() {
if (!filename || !size || size <= 0) {
throw new Error('You must specify a filename and file size');
}
if (maxLogSize < 0) {
throw new Error(`maxLogSize (${maxLogSize}) should be > 0`);
}
throwErrorIfArgumentsAreNotValid();
this.filename = filename;
this.size = size;
this.size = maxLogSize;
this.backups = backups;
this.options = options;
this.currentSize = 0;

View File

@ -92,7 +92,7 @@ const createAppender = (name, config) => {
appender => getAppender(appender, config),
levels
);
}, () => { });
}, /* istanbul ignore next */ () => {});
};
const setup = (config) => {

View File

@ -14,6 +14,7 @@ module.exports.configure = (config, layouts) => {
function checkForTimeout(fileKey) {
const timer = timers.get(fileKey);
const app = files.get(fileKey);
/* istanbul ignore else */
if (timer && app) {
if (Date.now() - timer.lastUsed > timer.timeout) {
debug('%s not used for > %d ms => close', fileKey, timer.timeout);

View File

@ -256,8 +256,8 @@ test("log4js configuration validation", batch => {
batch.test("should not throw error if configure object is freezed", t => {
const testFile = "test/tap/freeze-date-file-test";
t.teardown(() => {
removeFiles(testFile);
t.teardown(async () => {
await removeFiles(testFile);
});
t.doesNotThrow(() =>
log4js.configure(

View File

@ -13,6 +13,8 @@ test("default settings", t => {
output.push(evt);
};
},
shutdown() {
},
configure() {
return this.appender();
}

View File

@ -68,7 +68,7 @@ if (process.platform !== "win32") {
});
batch.teardown(async () => {
log4js.shutdown();
await new Promise(resolve => log4js.shutdown(resolve));
const filenames = Object.values(config.appenders).map(appender => appender.filename);
await removeFiles(filenames);

View File

@ -39,7 +39,7 @@ test("file appender single SIGHUP handler", t => {
log4js.configure(config);
t.teardown(async () => {
log4js.shutdown();
await new Promise(resolve => log4js.shutdown(resolve));
const filenames = Object.values(config.appenders).map(appender => appender.filename);
await removeFiles(filenames);

View File

@ -69,6 +69,7 @@ test("log4js fileAppender", batch => {
batch.test("should flush logs on shutdown", async t => {
const testFile = path.join(__dirname, "fa-default-test.log");
const logger = log4js.getLogger("default-settings");
await removeFile(testFile);
t.teardown(async () => {
@ -80,7 +81,6 @@ test("log4js fileAppender", batch => {
appenders: { test: { type: "file", filename: testFile } },
categories: { default: { appenders: ["test"], level: "trace" } }
});
const logger = log4js.getLogger("default-settings");
logger.info("1");
logger.info("2");
@ -100,12 +100,12 @@ test("log4js fileAppender", batch => {
batch.test("with a max file size and no backups", async t => {
const testFile = path.join(__dirname, "fa-maxFileSize-test.log");
const logger = log4js.getLogger("max-file-size");
await removeFile(testFile);
t.teardown(async () => {
await new Promise(resolve => log4js.shutdown(resolve));
await removeFile(testFile);
});
await removeFile(testFile);
// log file of 100 bytes maximum, no backups
log4js.configure({
@ -162,12 +162,12 @@ test("log4js fileAppender", batch => {
batch.test("with a max file size in unit mode and no backups", async t => {
const testFile = path.join(__dirname, "fa-maxFileSize-unit-test.log");
const logger = log4js.getLogger("max-file-size-unit");
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
t.teardown(async () => {
await new Promise(resolve => log4js.shutdown(resolve));
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
});
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
// log file of 1K = 1024 bytes maximum, no backups
log4js.configure({

View File

@ -61,6 +61,28 @@ test("log4js fileSyncAppender", batch => {
t.end();
});
// A negative maxLogSize must be rejected at configure() time with a
// descriptive error rather than silently producing a broken appender.
batch.test("should give error if invalid maxLogSize", async t => {
const maxLogSize = -1;
const expectedError = new Error(`maxLogSize (${maxLogSize}) should be > 0`);
t.throws(
() =>
log4js.configure({
appenders: {
file: {
type: "fileSync",
filename: path.join(__dirname, "fa-invalidMaxFileSize-sync-test.log"),
// use the shared constant so the config and the expected error
// message can never drift apart
maxLogSize
}
},
categories: {
default: { appenders: ["file"], level: "debug" }
}
}),
expectedError
);
t.end();
});
batch.test("with a max file size and no backups", t => {
const testFile = path.join(__dirname, "/fa-maxFileSize-sync-test.log");
const logger = log4js.getLogger("max-file-size");

View File

@ -154,6 +154,8 @@ test("log4js", batch => {
"should invoke appender shutdowns"
);
logger.info("this should not go to the appenders");
logger.log("info", "this should not go to the appenders");
logger._log(require("../../lib/levels").INFO, ["this should not go to the appenders"]);
t.notOk(events.event);
t.end();
});

View File

@ -2,6 +2,7 @@ const process = require("process");
const { test } = require("tap");
const debug = require("debug");
const fs = require("fs");
const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
const removeFiles = async filenames => {
@ -74,6 +75,7 @@ test("multiFile appender", batch => {
batch.test("should close file after timeout", t => {
t.teardown(async () => {
await new Promise(resolve => log4js.shutdown(resolve));
await removeFiles("logs/C.log");
});
/* checking that the file is closed after a timeout is done by looking at the debug logs
@ -119,8 +121,81 @@ test("multiFile appender", batch => {
}, timeoutMs*1 + 30); // add a 30 ms delay
});
// Simulates a file appender whose shutdown() reports an error, and checks
// that the multiFile appender's idle-timeout close path logs the error and
// carries on ("ignore error on file shutdown") instead of failing.
batch.test("should close file safely after timeout", t => {
t.teardown(async () => {
await new Promise(resolve => sandboxedLog4js.shutdown(resolve)); // eslint-disable-line no-use-before-define
await removeFiles("logs/C.log");
});
// The error the stubbed file appender hands back from shutdown().
const error = new Error("fileAppender shutdown error");
// Load log4js with the real file appender wrapped so that every
// shutdown() completes with `error`.
const sandboxedLog4js = sandbox.require("../../lib/log4js", {
requires: {
"./appenders/file": {
configure(config, layouts) {
const fileAppender = require("../../lib/appenders/file").configure(config, layouts);
const originalShutdown = fileAppender.shutdown;
fileAppender.shutdown = function (complete) {
const onCallback = function() {
complete(error);
};
originalShutdown(onCallback);
};
return fileAppender;
}
},
debug
}
});
/* checking that the file is closed after a timeout is done by looking at the debug logs
since detecting file locks with node.js is platform specific.
*/
// Capture stderr (where `debug` writes) so the debug output can be
// inspected; forward it only if the caller already had log4js:multiFile
// debugging switched on.
const debugWasEnabled = debug.enabled("log4js:multiFile");
const debugLogs = [];
const originalWrite = process.stderr.write;
process.stderr.write = (string, encoding, fd) => {
debugLogs.push(string);
if (debugWasEnabled) {
originalWrite.apply(process.stderr, [string, encoding, fd]);
}
};
debug.enable("log4js:multiFile");
const timeoutMs = 20;
sandboxedLog4js.configure({
appenders: {
multi: {
type: "multiFile",
base: "logs/",
property: "label",
extension: ".log",
timeout: timeoutMs
}
},
categories: { default: { appenders: ["multi"], level: "info" } }
});
const loggerC = sandboxedLog4js.getLogger("cheese");
loggerC.addContext("label", "C");
loggerC.info("I am in logger C");
// Wait past the idle timeout, then check the last two captured debug
// lines: the close decision, and the ignored shutdown error.
setTimeout(() => {
t.match(
debugLogs[debugLogs.length - 2],
`C not used for > ${timeoutMs} ms => close`,
"(timeout1) should have closed"
);
t.match(
debugLogs[debugLogs.length - 1],
`ignore error on file shutdown: ${error.message}`,
"safely shutdown"
);
// Restore global debug state and stderr before ending the test.
if (!debugWasEnabled) {
debug.disable("log4js:multiFile");
}
process.stderr.write = originalWrite;
t.end();
}, timeoutMs*1 + 30); // add a 30 ms delay
});
batch.test("should close file after extended timeout", t => {
t.teardown(async () => {
await new Promise(resolve => log4js.shutdown(resolve));
await removeFiles("logs/D.log");
});
/* checking that the file is closed after a timeout is done by looking at the debug logs

View File

@ -79,7 +79,7 @@ test("Multiprocess Appender", async batch => {
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.close(true);
fakeNet.cbs.close();
logger.info("after error, before connect");
fakeNet.cbs.connect();
logger.info("after error, after connect");
@ -173,6 +173,45 @@ test("Multiprocess Appender", async batch => {
t.end();
});
// Drive a multiprocess worker appender through a connect -> error -> close
// -> reconnect cycle and verify the socket is re-opened and no messages
// are lost.
batch.test("worker with error", t => {
// Fake out the net module so no real sockets are opened.
const fakeNet = makeFakeNet();
const log4js = sandbox.require("../../lib/log4js", {
requires: {
net: fakeNet
}
});
log4js.configure({
appenders: { worker: { type: "multiprocess", mode: "worker" } },
categories: { default: { appenders: ["worker"], level: "trace" } }
});
const logger = log4js.getLogger();
logger.info("before connect");
fakeNet.cbs.connect();
logger.info("after connect");
fakeNet.cbs.error();
logger.info("after error, before close");
fakeNet.cbs.close();
logger.info("after close, before connect");
fakeNet.cbs.connect();
logger.info("after close, after connect");
t.test("should attempt to re-open the socket", assert => {
// skipping the __LOG4JS__ separators
assert.match(fakeNet.data[0], "before connect");
assert.match(fakeNet.data[2], "after connect");
assert.match(fakeNet.data[4], "after error, before close");
assert.match(fakeNet.data[6], "after close, before connect");
assert.match(fakeNet.data[8], "after close, after connect");
assert.equal(fakeNet.createConnectionCalled, 2);
assert.end();
});
t.end();
});
batch.test("worker defaults", t => {
const fakeNet = makeFakeNet();

View File

@ -0,0 +1,28 @@
const { test } = require("tap");
const log4js = require("../../lib/log4js");
// Exercises the in-memory "recording" appender: events are captured in
// order, playback() returns them, and reset() empties the buffer.
test("recording appender", t => {
log4js.configure({
appenders: { rec: { type: 'recording' } },
categories: { default: { appenders: [ 'rec' ], 'level': 'debug' } }
});
const logger = log4js.getLogger();
logger.level = 'debug';
logger.debug('This will go to the recording!');
logger.debug('Another one');
const recording = log4js.recording();
// Playback returns every event captured so far, in order.
const recorded = recording.playback();
t.equal(recorded.length, 2, "There should be 2 recorded events");
t.equal(recorded[0].data[0], "This will go to the recording!");
t.equal(recorded[1].data[0], "Another one");
// reset() clears the buffer; a subsequent playback is empty.
recording.reset();
const afterReset = recording.playback();
t.equal(afterReset.length, 0, "There should be 0 recorded events");
t.end();
});

View File

@ -5,21 +5,20 @@ const vcr = require("../../lib/appenders/recording");
const levels = require("../../lib/levels");
const LoggingEvent = require("../../lib/LoggingEvent");
log4js.configure({
appenders: {
vcr: { type: "recording" },
tcp: { type: "tcp-server", port: 5678 }
},
categories: {
default: { appenders: ["vcr"], level: "debug" }
}
});
// give the socket a chance to start up
test("TCP Server", batch => {
batch.test(
"should listen for TCP messages and re-send via process.send",
t => {
log4js.configure({
appenders: {
vcr: { type: "recording" },
tcp: { type: "tcp-server", port: 5678 }
},
categories: {
default: { appenders: ["vcr"], level: "debug" }
}
});
// give the socket a chance to start up
setTimeout(() => {
const socket = net.connect(5678, () => {
socket.write(
@ -83,8 +82,114 @@ test("TCP Server", batch => {
socket.unref();
}, 100);
batch.end();
}
);
// Feeds the tcp-server appender partial frames split across multiple
// socket writes and verifies that unparseable fragments are reported as
// "Unable to parse log:" ERROR events on the recording appender.
batch.test(
"sending incomplete messages in chunks",
t => {
log4js.configure({
appenders: {
vcr: { type: "recording" },
tcp: { type: "tcp-server" }
},
categories: {
default: { appenders: ["vcr"], level: "debug" }
}
});
// give the socket a chance to start up
setTimeout(() => {
// NOTE(review): no port is configured above, so 5000 is presumably the
// tcp-server default — confirm against lib/appenders/tcp-server.
const socket = net.connect(5000, () => {
// Writes each array entry in order, 100 ms apart (so the writes are
// likely delivered as separate chunks); calls finalCallback when done.
const syncWrite = (dataArray = [], finalCallback) => {
if (dataArray === null) {
dataArray = [];
} else if (!Array.isArray(dataArray)) {
dataArray = [dataArray];
}
if (typeof finalCallback !== "function") {
finalCallback = () => {};
}
setTimeout(() => {
if (!dataArray.length) {
finalCallback();
} else if (dataArray.length === 1) {
socket.write(dataArray.shift(), finalCallback);
} else {
socket.write(dataArray.shift(), () => { syncWrite(dataArray, finalCallback); });
}
}, 100);
};
// __LOG4JS__ is the frame delimiter, so this stream yields the
// fragments "Hello", "World", "testing nonsense" and "more nonsense" —
// none of which is valid JSON.
const dataArray = [
"__LOG4JS__",
"Hello__LOG4JS__World",
"__LOG4JS__",
"testing nonsense",
`__LOG4JS__more nonsense__LOG4JS__`
];
// After all chunks are written: close the socket, shut down log4js,
// then assert on the recorded events (indices 4-7 are the
// parse-failure reports).
const finalCallback = () => {
socket.end();
setTimeout(() => {
log4js.shutdown(() => {
const logs = vcr.replay();
t.equal(logs.length, 8);
t.match(logs[4], {
data: [
"Unable to parse log:",
"Hello",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.match(logs[5], {
data: [
"Unable to parse log:",
"World",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.match(logs[6], {
data: [
"Unable to parse log:",
"testing nonsense",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.match(logs[7], {
data: [
"Unable to parse log:",
"more nonsense",
"because: ",
SyntaxError
],
categoryName: "log4js",
level: { levelStr: "ERROR" },
context: {}
});
t.end();
});
}, 100);
};
syncWrite(dataArray, finalCallback);
});
socket.unref();
}, 100);
}
);
batch.end();
});

View File

@ -139,7 +139,7 @@ log4js.configure({
log4js.configure({
appenders: { rec: { type: 'recording' } },
categories: { default: { appenders: [ 'rec'], 'level': 'debug' } }
categories: { default: { appenders: ['rec'], 'level': 'debug' } }
});
const logger8 = log4js.getLogger();
logger8.level = 'debug'
@ -150,6 +150,9 @@ const loggingEvents = recording.playback()
if (loggingEvents.length !== 2) {
throw new Error(`Expected 2 recorded events, got ${loggingEvents.length}`)
}
if (loggingEvents[0].data[0] !== 'This will go to the recording!') {
throw new Error(`Expected message 'This will go to the recording!', got ${loggingEvents[0].data[0]}`)
}
if (loggingEvents[1].data[0] !== 'Another one') {
throw new Error(`Expected message 'Another one', got ${loggingEvents[1].data[0]}`)
}