Merge pull request #1151 from log4js-node/feat-zero-backup

allow for zero backup - https://github.com/log4js-node/streamroller/pull/74
This commit is contained in:
Lam Wei Li 2022-01-20 01:52:10 +08:00 committed by GitHub
commit e6bd888c2d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 14 additions and 18 deletions

View File

@ -45,9 +45,7 @@ function openTheStream(file, fileSize, numFiles, options) {
*/
function fileAppender(file, layout, logSize, numBackups, options, timezoneOffset) {
file = path.normalize(file);
numBackups = numBackups === undefined ? 5 : numBackups;
// there has to be at least one backup if logSize has been specified
numBackups = numBackups === 0 ? 1 : numBackups;
numBackups = (!numBackups && numBackups !== 0) ? 5 : numBackups;
debug(
'Creating file appender (',

View File

@ -30,7 +30,7 @@ class RollingFileSync {
this.filename = filename;
this.size = size;
this.backups = backups || 1;
this.backups = backups;
this.options = options;
this.currentSize = 0;
@ -80,7 +80,9 @@ class RollingFileSync {
function increaseFileIndex(fileToRename) {
const idx = index(fileToRename);
debug(`Index of ${fileToRename} is ${idx}`);
if (idx < that.backups) {
if (that.backups === 0) {
fs.truncateSync(filename, 0);
} else if (idx < that.backups) {
// on windows, you can get a EEXIST error if you rename a file to an existing file
// so, we'll try to delete the file we're renaming to first
try {
@ -146,9 +148,7 @@ class RollingFileSync {
function fileAppender(file, layout, logSize, numBackups, timezoneOffset, options) {
debug('fileSync appender created');
file = path.normalize(file);
numBackups = numBackups === undefined ? 5 : numBackups;
// there has to be at least one backup if logSize has been specified
numBackups = numBackups === 0 ? 1 : numBackups;
numBackups = (!numBackups && numBackups !== 0) ? 5 : numBackups;
function openTheStream(filePath, fileSize, numFiles) {
let stream;

View File

@ -82,9 +82,9 @@ test("log4js fileAppender", batch => {
t.tearDown(async () => {
await new Promise(resolve => log4js.shutdown(resolve));
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
await removeFile(testFile);
});
await Promise.all([removeFile(testFile), removeFile(`${testFile}.1`)]);
await removeFile(testFile);
// log file of 100 bytes maximum, no backups
log4js.configure({
@ -113,7 +113,7 @@ test("log4js fileAppender", batch => {
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-test.log")
);
t.equal(logFiles.length, 2, "should be 2 files");
t.equal(logFiles.length, 1, "should be 1 file");
t.end();
});
@ -158,7 +158,7 @@ test("log4js fileAppender", batch => {
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-unit-test.log")
);
t.equal(logFiles.length, 2, "should be 2 files");
t.equal(logFiles.length, 1, "should be 1 file");
t.end();
});

View File

@ -43,11 +43,9 @@ test("log4js fileSyncAppender", batch => {
const testFile = path.join(__dirname, "/fa-maxFileSize-sync-test.log");
const logger = log4js.getLogger("max-file-size");
remove(testFile);
remove(`${testFile}.1`);
t.tearDown(() => {
remove(testFile);
remove(`${testFile}.1`);
});
// log file of 100 bytes maximum, no backups
@ -77,12 +75,12 @@ test("log4js fileSyncAppender", batch => {
});
});
t.test("there should be two test files", assert => {
t.test("there should be one test file", assert => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-sync-test.log")
);
assert.equal(logFiles.length, 2);
assert.equal(logFiles.length, 1);
assert.end();
});
});
@ -128,12 +126,12 @@ test("log4js fileSyncAppender", batch => {
});
});
t.test("there should be two test files", assert => {
t.test("there should be one test file", assert => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter(file =>
file.includes("fa-maxFileSize-unit-sync-test.log")
);
assert.equal(logFiles.length, 2);
assert.equal(logFiles.length, 1);
assert.end();
});
});