Merge pull request #1184 from log4js-node/update-test

chore(test): improve test coverage
This commit is contained in:
Lam Wei Li 2022-02-08 15:22:35 +08:00 committed by GitHub
commit ca97feedfc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 182 additions and 9 deletions

View File

@ -49,6 +49,7 @@ module.exports.configure = (config, layouts) => {
});
}
} else if (config.timeout) {
debug('%s extending activity', fileKey);
timers.get(fileKey).lastUsed = Date.now();
}
@ -64,7 +65,8 @@ module.exports.configure = (config, layouts) => {
cb();
}
let error;
timers.forEach((timer) => {
timers.forEach((timer, fileKey) => {
debug('clearing timer for ', fileKey);
clearInterval(timer.interval);
});
files.forEach((app, fileKey) => {

View File

@ -34,7 +34,7 @@
},
"scripts": {
"pretest": "eslint \"lib/**/*.js\" \"test/**/*.js\"",
"test": "tap \"test/tap/**/*.js\" --cov",
"test": "tap \"test/tap/**/*.js\" --cov --timeout=45",
"typings": "tsc -p types/tsconfig.json",
"codecov": "tap \"test/tap/**/*.js\" --cov --coverage-report=lcov && codecov"
},

View File

@ -40,6 +40,38 @@ test("log4js categoryFilter", batch => {
t.end();
});
batch.test("appender should exclude categories", t => {
  // Route everything through a categoryFilter that drops the "app" and
  // "web" categories before events reach the recording appender.
  log4js.configure({
    appenders: {
      recorder: { type: "recording" },
      filtered: {
        type: "categoryFilter",
        exclude: ["app", "web"],
        appender: "recorder"
      }
    },
    categories: { default: { appenders: ["filtered"], level: "DEBUG" } }
  });

  const web = log4js.getLogger("web");
  const app = log4js.getLogger("app");
  const db = log4js.getLogger("db");

  // These three events fall in excluded categories and must be dropped.
  web.debug("This should not get logged");
  app.debug("This should get logged");
  web.debug("Hello again");
  // "db" is not in the exclude list, so this one must pass through.
  db.debug("This should be included by the appender anyway");

  const recorded = recording.replay();
  t.equal(recorded.length, 1);
  t.equal(
    recorded[0].data[0],
    "This should be included by the appender anyway"
  );
  t.end();
});
batch.test("should not really need a category filter any more", t => {
log4js.configure({
appenders: { recorder: { type: "recording" } },

View File

@ -117,6 +117,27 @@ test("log4js fileAppender", batch => {
t.end();
});
batch.test("with a max file size in wrong unit mode", async t => {
  // "1Z" carries an unrecognised size suffix, so configure() must reject
  // it with a descriptive error rather than silently accepting it.
  const badSize = "1Z";
  const config = {
    appenders: {
      file: {
        type: "file",
        maxLogSize: badSize
      }
    },
    categories: {
      default: { appenders: ["file"], level: "debug" }
    }
  };
  t.throws(
    () => log4js.configure(config),
    new Error(`maxLogSize: "${badSize}" is invalid`)
  );
  t.end();
});
batch.test("with a max file size in unit mode and no backups", async t => {
const testFile = path.join(__dirname, "fa-maxFileSize-unit-test.log");
const logger = log4js.getLogger("max-file-size-unit");
@ -365,19 +386,19 @@ test("log4js fileAppender", batch => {
});
logger.info("This should be in the file.",
"\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m.");
"\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m.", {}, []);
await sleep(100);
let fileContents = await fs.readFile(testFilePlain, "utf8");
t.match(fileContents, `This should be in the file. Color should be plain.${EOL}`);
t.match(fileContents, `This should be in the file. Color should be plain. {} []${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
);
fileContents = await fs.readFile(testFileAsIs, "utf8");
t.match(fileContents, "This should be in the file.",
`\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m.${EOL}`);
`\x1b[33mColor\x1b[0m \x1b[93;41mshould\x1b[0m be \x1b[38;5;8mplain\x1b[0m. {} []${EOL}`);
t.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /

View File

@ -89,6 +89,7 @@ test("multiFile appender", batch => {
}
};
debug.enable("log4js:multiFile");
const timeoutMs = 20;
log4js.configure({
appenders: {
multi: {
@ -96,7 +97,7 @@ test("multiFile appender", batch => {
base: "logs/",
property: "label",
extension: ".log",
timeout: 20
timeout: timeoutMs
}
},
categories: { default: { appenders: ["multi"], level: "info" } }
@ -107,14 +108,131 @@ test("multiFile appender", batch => {
setTimeout(() => {
t.match(
debugLogs[debugLogs.length - 1],
"C not used for > 20 ms => close"
`C not used for > ${timeoutMs} ms => close`,
"(timeout1) should have closed"
);
if (!debugWasEnabled) {
debug.disable("log4js:multiFile");
}
process.stderr.write = originalWrite;
t.end();
}, 50);
}, timeoutMs*1 + 30); // add a 30 ms delay
});
batch.test("should close file after extended timeout", t => {
t.teardown(async () => {
await removeFiles("logs/D.log");
});
/* checking that the file is closed after a timeout is done by looking at the debug logs
since detecting file locks with node.js is platform specific.
*/
const debugWasEnabled = debug.enabled("log4js:multiFile");
const debugLogs = [];
const originalWrite = process.stderr.write;
// Intercept stderr so the appender's debug output can be inspected;
// pass it through only if debug logging was already on for this namespace.
process.stderr.write = (string, encoding, fd) => {
debugLogs.push(string);
if (debugWasEnabled) {
originalWrite.apply(process.stderr, [string, encoding, fd]);
}
};
debug.enable("log4js:multiFile");
const timeoutMs = 100;
log4js.configure({
appenders: {
multi: {
type: "multiFile",
base: "logs/",
property: "label",
extension: ".log",
timeout: timeoutMs
}
},
categories: { default: { appenders: ["multi"], level: "info" } }
});
const loggerD = log4js.getLogger("cheese");
loggerD.addContext("label", "D");
loggerD.info("I am in logger D");
// Halfway through the idle timeout, log again: this should reset the
// appender's idle timer ("extending activity" in the debug output).
setTimeout(() => {
loggerD.info("extending activity!");
t.match(
debugLogs[debugLogs.length - 1],
"D extending activity",
"should have extended"
);
}, timeoutMs/2);
// Shortly after one full timeout: the file must still be open, because
// the mid-timeout write above pushed the deadline back.
setTimeout(() => {
t.notOk(
debugLogs.some(s => s.indexOf(`D not used for > ${timeoutMs} ms => close`) !== -1),
"(timeout1) should not have closed"
);
}, timeoutMs*1 + 30); // add a 30 ms delay
// After roughly two timeouts of total elapsed time the extended deadline
// has also passed, so the file must now be closed. Restore stderr and
// the debug namespace state before ending the test.
setTimeout(() => {
t.match(
debugLogs[debugLogs.length - 1],
`D not used for > ${timeoutMs} ms => close`,
"(timeout2) should have closed"
);
if (!debugWasEnabled) {
debug.disable("log4js:multiFile");
}
process.stderr.write = originalWrite;
t.end();
}, timeoutMs*2 + 30); // add a 30 ms delay
});
batch.test("should clear interval for active timers on shutdown", t => {
t.teardown(async () => {
await removeFiles("logs/D.log");
});
/* shutdown behaviour is verified by looking at the debug logs,
since detecting file locks with node.js is platform specific.
*/
const debugWasEnabled = debug.enabled("log4js:multiFile");
const debugLogs = [];
const originalWrite = process.stderr.write;
// Intercept stderr so the appender's debug output can be inspected;
// pass it through only if debug logging was already on for this namespace.
process.stderr.write = (string, encoding, fd) => {
debugLogs.push(string);
if (debugWasEnabled) {
originalWrite.apply(process.stderr, [string, encoding, fd]);
}
};
debug.enable("log4js:multiFile");
const timeoutMs = 100;
log4js.configure({
appenders: {
multi: {
type: "multiFile",
base: "logs/",
property: "label",
extension: ".log",
timeout: timeoutMs
}
},
categories: { default: { appenders: ["multi"], level: "info" } }
});
const loggerD = log4js.getLogger("cheese");
loggerD.addContext("label", "D");
loggerD.info("I am in logger D");
// Shutdown while the idle timer for file D is still pending: the timer
// must be cleared (no "not used for > N ms" close) and the appender's
// shutdown path must run for D instead.
log4js.shutdown(() => {
t.notOk(
debugLogs.some(s => s.indexOf(`D not used for > ${timeoutMs} ms => close`) !== -1),
"should not have closed"
);
t.ok(
debugLogs.some(s => s.indexOf("clearing timer for D") !== -1),
"should have cleared timers"
);
t.match(
debugLogs[debugLogs.length - 1],
"calling shutdown for D",
"should have called shutdown"
);
if (!debugWasEnabled) {
debug.disable("log4js:multiFile");
}
process.stderr.write = originalWrite;
t.end();
});
});
batch.test(