Mirror of https://github.com/log4js-node/log4js-node.git (synced 2025-12-08 19:26:01 +00:00)

Merge pull request #1204 from log4js-node/update-test

chore(test): improve test coverage

Commit 92507f7aec

@@ -188,5 +188,60 @@ test("../../lib/appenders/dateFile", batch => {
    t.end();
  });

  batch.test("when underlying stream errors", t => {
    let consoleArgs;
    let errorHandler;

    const DateRollingFileStream = class {
      end() {
        this.ended = true;
      }

      on(evt, cb) {
        if (evt === "error") {
          this.errored = true;
          errorHandler = cb;
        }
      }

      write() {
        this.written = true;
        return true;
      }
    };
    const dateFileAppender = sandbox.require("../../lib/appenders/dateFile", {
      globals: {
        console: {
          error(...args) {
            consoleArgs = args;
          }
        }
      },
      requires: {
        streamroller: {
          DateRollingFileStream
        }
      }
    });

    dateFileAppender.configure(
      { filename: "test1.log", maxLogSize: 100 },
      { basicLayout() {} }
    );
    errorHandler({ error: "aargh" });

    t.test("should log the error to console.error", assert => {
      assert.ok(consoleArgs);
      assert.equal(
        consoleArgs[0],
        "log4js.dateFileAppender - Writing to file %s, error happened "
      );
      assert.equal(consoleArgs[1], "test1.log");
      assert.equal(consoleArgs[2].error, "aargh");
      assert.end();
    });
    t.end();
  });

  batch.end();
});
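
For context, the error handling this new test pins down is simple: the dateFile appender attaches an "error" listener to its streamroller stream and reports failures via console.error rather than crashing. A minimal sketch of that wiring, assuming the listener is attached when the stream is opened (the helper name and parameters are hypothetical; the console.error format string and arguments are exactly what the assertions check):

const streamroller = require("streamroller");

function openTheStream(filename, pattern, options) {
  const stream = new streamroller.DateRollingFileStream(filename, pattern, options);
  stream.on("error", err => {
    // This is the call the sandboxed console.error captures in the test.
    console.error(
      "log4js.dateFileAppender - Writing to file %s, error happened ",
      filename,
      err
    );
  });
  return stream;
}
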
@@ -40,6 +40,42 @@ test("log4js fileSyncAppender", batch => {
    });
  });

  batch.test("with existing file", t => {
    const testFile = path.join(__dirname, "/fa-existing-file-sync-test.log");
    const logger = log4js.getLogger("default-settings");
    remove(testFile);

    t.teardown(() => {
      remove(testFile);
    });

    log4js.configure({
      appenders: { sync: { type: "fileSync", filename: testFile } },
      categories: { default: { appenders: ["sync"], level: "debug" } }
    });

    logger.info("This should be in the file.");

    log4js.shutdown(() => {
      log4js.configure({
        appenders: { sync: { type: "fileSync", filename: testFile } },
        categories: { default: { appenders: ["sync"], level: "debug" } }
      });

      logger.info("This should also be in the file.");

      fs.readFile(testFile, "utf8", (err, fileContents) => {
        t.match(fileContents, `This should be in the file.${EOL}`);
        t.match(fileContents, `This should also be in the file.${EOL}`);
        t.match(
          fileContents,
          /\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}] \[INFO] default-settings - /
        );
        t.end();
      });
    });
  });
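
The behaviour guarded here is that a fileSync appender opens an existing log file in append mode, so a shutdown followed by a reconfigure does not truncate earlier entries. A minimal sketch of that open step, assuming Node's fs module is used directly (the helper name is hypothetical):

const fs = require("fs");

// The "a" (append) flag preserves existing contents on re-open;
// "w" would truncate and make the first t.match above fail.
function openFileForSyncLogging(filename) {
  return fs.openSync(filename, "a");
}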

  batch.test("should give error if invalid filename", async t => {
    const file = "";
    const expectedError = new Error(`Invalid filename: ${file}`);

@@ -91,7 +91,7 @@ test("multiFile appender", batch => {
      }
    };
    debug.enable("log4js:multiFile");
-   const timeoutMs = 20;
+   const timeoutMs = 25;
    log4js.configure({
      appenders: {
        multi: {

@@ -118,7 +118,7 @@ test("multiFile appender", batch => {
      }
      process.stderr.write = originalWrite;
      t.end();
-   }, timeoutMs*1 + 30); // add a 30 ms delay
+   }, timeoutMs*1 + 50); // add a 50 ms delay
  });

  batch.test("should close file safely after timeout", t => {

@@ -158,7 +158,7 @@ test("multiFile appender", batch => {
      }
    };
    debug.enable("log4js:multiFile");
-   const timeoutMs = 20;
+   const timeoutMs = 25;
    sandboxedLog4js.configure({
      appenders: {
        multi: {

@@ -190,7 +190,7 @@ test("multiFile appender", batch => {
      }
      process.stderr.write = originalWrite;
      t.end();
-   }, timeoutMs*1 + 30); // add a 30 ms delay
+   }, timeoutMs*1 + 50); // add a 50 ms delay
  });

  batch.test("should close file after extended timeout", t => {

@@ -240,7 +240,7 @@ test("multiFile appender", batch => {
        debugLogs.some(s => s.indexOf(`D not used for > ${timeoutMs} ms => close`) !== -1),
        "(timeout1) should not have closed"
      );
-   }, timeoutMs*1 + 30); // add a 30 ms delay
+   }, timeoutMs*1 + 50); // add a 50 ms delay
    setTimeout(() => {
      t.match(
        debugLogs[debugLogs.length - 1],

@@ -252,7 +252,7 @@ test("multiFile appender", batch => {
      }
      process.stderr.write = originalWrite;
      t.end();
-   }, timeoutMs*2 + 30); // add a 30 ms delay
+   }, timeoutMs*2 + 50); // add a 50 ms delay
  });
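
These hunks all make the same adjustment: timeoutMs grows from 20 to 25 and the padding on top of it from 30 ms to 50 ms, presumably to give the appender's idle-close timer more slack on slow CI machines. The mechanism being timed is a per-file inactivity timeout; a hedged sketch of it, assuming a timer per file key (the map and helper names are hypothetical, the debug line is the one the assertions grep for):

const debug = require("debug")("log4js:multiFile");
const timers = new Map();

function scheduleIdleClose(fileKey, file, timeoutMs) {
  // Restart the inactivity timer every time the file is written to.
  clearTimeout(timers.get(fileKey));
  timers.set(fileKey, setTimeout(() => {
    // The tests search the captured debug output for exactly this line.
    debug(`${fileKey} not used for > ${timeoutMs} ms => close`);
    timers.delete(fileKey);
    file.end();
  }, timeoutMs));
}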

  batch.test("should clear interval for active timers on shutdown", t => {

@@ -13,6 +13,7 @@ tap.test("Drain event test", batch => {

  batch.test("Should emit pause event and resume when logging in a file with high frequency", t => {
    t.teardown(async () => {
      process.off("log4js:pause", process.listeners("log4js:pause")[process.listeners("log4js:pause").length - 1]);
      await removeFiles("logs/drain.log");
    });
    // Generate logger with 5k of highWaterMark config

@@ -31,8 +32,11 @@ tap.test("Drain event test", batch => {
    process.on("log4js:pause", value => {
      if (value) {
+       paused = true;
        t.ok(value, "log4js:pause, true");
      } else {
+       resumed = true;
        t.ok(!value, "log4js:pause, false");
+       t.end();
      }
    });

@@ -42,12 +46,12 @@ tap.test("Drain event test", batch => {
        logger.info("This is a test for emitting drain event");
      }
    }
    t.end();
  });
batch.test("Should emit pause event and resume when logging in a date file with high frequency", (t) => {
|
||||
t.teardown(async () => {
|
||||
process.off("log4js:pause", process.listeners("log4js:pause")[process.listeners("log4js:pause").length - 1]);
|
||||
await removeFiles("logs/date-file-drain.log");
|
||||
});
|
||||
// Generate date file logger with 5kb of highWaterMark config
|
||||

@@ -66,8 +70,11 @@ tap.test("Drain event test", batch => {
    process.on("log4js:pause", value => {
      if (value) {
+       paused = true;
        t.ok(value, "log4js:pause, true");
      } else {
+       resumed = true;
        t.ok(!value, "log4js:pause, false");
+       t.end();
      }
    });

@@ -76,7 +83,6 @@ tap.test("Drain event test", batch => {
      if (!paused)
        logger.info("This is a test for emitting drain event in date file logger");
    }
-   t.end();
  });
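
Both drain tests listen for the same back-pressure contract: when the appender's stream.write() returns false, log4js emits a process-level "log4js:pause" event with true, and when the stream later emits "drain" it fires the event again with false. A hedged sketch of that signalling (the stream setup and flag are illustrative, not the library's internals):

const fs = require("fs");

// The 5kb highWaterMark mirrors the config comment in the tests above.
const stream = fs.createWriteStream("logs/drain.log", { highWaterMark: 5 * 1024 });
let canWrite = true;

function writeToStream(serialisedEvent) {
  canWrite = stream.write(serialisedEvent, "utf8");
  if (!canWrite) {
    // Buffer is full: tell listeners (like these tests) to back off.
    process.emit("log4js:pause", true);
  }
}

stream.on("drain", () => {
  canWrite = true;
  // Buffer flushed: listeners may resume logging.
  process.emit("log4js:pause", false);
});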

  batch.teardown(async () => {

@@ -1,5 +1,7 @@
const { test } = require("tap");
const net = require("net");
+const flatted = require("flatted");
+const sandbox = require("@log4js-node/sandboxed-module");
const log4js = require("../../lib/log4js");
const LoggingEvent = require("../../lib/LoggingEvent");

@@ -26,6 +28,53 @@ function makeServer(config) {
  return server;
}

function makeFakeNet() {
  return {
    data: [],
    cbs: {},
    createConnectionCalled: 0,
    createConnection(port, host) {
      const fakeNet = this;
      this.port = port;
      this.host = host;
      this.createConnectionCalled += 1;
      return {
        on(evt, cb) {
          fakeNet.cbs[evt] = cb;
        },
        write(data, encoding) {
          fakeNet.data.push(data);
          fakeNet.encoding = encoding;
          return false;
        },
        end() {
          fakeNet.closeCalled = true;
        }
      };
    },
    createServer(cb) {
      const fakeNet = this;
      cb({
        remoteAddress: "1.2.3.4",
        remotePort: "1234",
        setEncoding(encoding) {
          fakeNet.encoding = encoding;
        },
        on(event, cb2) {
          fakeNet.cbs[event] = cb2;
        }
      });

      return {
        listen(port, host) {
          fakeNet.port = port;
          fakeNet.host = host;
        }
      };
    }
  };
}
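
Two design choices in this fake are worth noting before reading the tests that use it: write() always returns false, forcing the appender down its back-pressure/buffering path, and every on() registration is recorded in cbs so a test can fire connect/error/close/drain by hand instead of waiting on a real socket. Driven directly, it behaves like this (a usage sketch):

const fakeNet = makeFakeNet();
const socket = fakeNet.createConnection(5000, "localhost");

socket.on("connect", () => console.log("connected"));
socket.write("a message__LOG4JS__", "utf8"); // captured; always returns false

fakeNet.cbs.connect();                       // the test decides when this fires
console.log(fakeNet.data);                   // => ["a message__LOG4JS__"]
console.log(fakeNet.createConnectionCalled); // => 1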

test("TCP Appender", batch => {

  batch.test("Default Configuration", t => {

@@ -118,7 +167,6 @@ test("TCP Appender", batch => {
    });
  });

  batch.test("Custom Layout", t => {
    messages = [];

@@ -171,5 +219,104 @@ test("TCP Appender", batch => {
    });
  });

  batch.test("when underlying stream errors", t => {
    const fakeNet = makeFakeNet();

    const sandboxedLog4js = sandbox.require("../../lib/log4js", {
      requires: {
        net: fakeNet
      }
    });
    sandboxedLog4js.configure({
      appenders: {
        default: { type: "tcp" },
      },
      categories: {
        default: { appenders: ["default"], level: "debug" },
      }
    });

    const logger = sandboxedLog4js.getLogger();

    logger.info("before connect");
    t.test(
      "should buffer messages written before socket is connected",
      assert => {
        assert.equal(fakeNet.data.length, 0);
        assert.equal(fakeNet.createConnectionCalled, 1);
        assert.end();
      }
    );

    fakeNet.cbs.connect();
    t.test(
      "should flush buffered messages",
      assert => {
        assert.equal(fakeNet.data.length, 1);
        assert.equal(fakeNet.createConnectionCalled, 1);
        assert.match(fakeNet.data[0], "before connect");
        assert.end();
      }
    );

    logger.info("after connect");
    t.test(
      "should write log messages to socket as flatted strings with a terminator string",
      assert => {
        assert.equal(fakeNet.data.length, 2);
        assert.match(fakeNet.data[0], "before connect");
        assert.ok(fakeNet.data[0].endsWith("__LOG4JS__"));
        assert.match(fakeNet.data[1], "after connect");
        assert.ok(fakeNet.data[1].endsWith("__LOG4JS__"));
        assert.equal(fakeNet.encoding, "utf8");
        assert.end();
      }
    );

    fakeNet.cbs.error();
    logger.info("after error, before close");
    fakeNet.cbs.close();
    logger.info("after close, before connect");
    fakeNet.cbs.connect();
    logger.info("after error, after connect");
    t.test("should attempt to re-open the socket on error", assert => {
      assert.equal(fakeNet.data.length, 5);
      assert.equal(fakeNet.createConnectionCalled, 2);
      assert.match(fakeNet.data[2], "after error, before close");
      assert.match(fakeNet.data[3], "after close, before connect");
      assert.match(fakeNet.data[4], "after error, after connect");
      assert.end();
    });

    t.test("should buffer messages until drain", assert => {
      const previousLength = fakeNet.data.length;
      logger.info("should not be flushed");
      assert.equal(fakeNet.data.length, previousLength);
      assert.notMatch(fakeNet.data[fakeNet.data.length - 1], "should not be flushed");

      fakeNet.cbs.drain();
      assert.equal(fakeNet.data.length, previousLength + 1);
      assert.match(fakeNet.data[fakeNet.data.length - 1], "should not be flushed");
      assert.end();
    });

    t.test("should serialize an Error correctly", assert => {
      const previousLength = fakeNet.data.length;
      logger.error(new Error("Error test"));
      fakeNet.cbs.drain();
      assert.equal(fakeNet.data.length, previousLength + 1);
      const raw = fakeNet.data[fakeNet.data.length - 1];
      assert.ok(
        flatted.parse(raw.substring(0, raw.indexOf('__LOG4JS__'))).data[0].stack,
        `Expected:\n\n${fakeNet.data[6]}\n\n to have a 'data[0].stack' property`
      );
      const actual = flatted.parse(raw.substring(0, raw.indexOf('__LOG4JS__'))).data[0].stack;
      assert.match(actual, /^Error: Error test/);
      assert.end();
    });

    t.end();
  });
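
Read together, the assertions describe the tcp appender's full lifecycle: buffer while disconnected, flush on connect, frame each event as a flatted string terminated by "__LOG4JS__", reconnect after close, and hold messages whenever a write reports back-pressure. A hedged sketch of logic consistent with all of the above (names and structure are illustrative, not lifted from lib/appenders/tcp.js):

const net = require("net");
const flatted = require("flatted");

let socket;
let canWrite = false;
const buffer = [];

function write(loggingEvent) {
  // flatted tolerates the circular references a LoggingEvent can hold;
  // "__LOG4JS__" is the record terminator the assertions look for.
  canWrite = socket.write(`${flatted.stringify(loggingEvent)}__LOG4JS__`, "utf8");
}

function emptyBuffer() {
  let evt;
  while ((evt = buffer.shift())) write(evt);
}

function createSocket(port, host) {
  socket = net.createConnection(port, host);
  socket.on("connect", () => {
    emptyBuffer(); // flush anything queued while down
    canWrite = true;
  });
  socket.on("drain", () => {
    canWrite = true;
    emptyBuffer();
  });
  socket.on("error", () => {
    canWrite = false; // wait for "close", which triggers the retry
  });
  socket.on("close", () => createSocket(port, host)); // the second createConnection call
}

function log(loggingEvent) {
  if (canWrite) write(loggingEvent);
  else buffer.push(loggingEvent);
}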

  batch.end();
});