Merge pull request #704 from NeoyeElf/feature/maxFileSize

feat: Enable maxLogSize with unit
Gareth Jones 2018-05-18 07:47:59 +10:00 committed by GitHub
commit cff7c54c1c
5 changed files with 146 additions and 2 deletions

lib/appender-adapter.js (new file, 49 additions)

@@ -0,0 +1,49 @@
function maxFileSizeUnitTransform(maxLogSize) {
if (typeof maxLogSize === 'number' && Number.isInteger(maxLogSize)) {
return maxLogSize;
}
const units = {
K: 1024,
M: 1024 * 1024,
G: 1024 * 1024 * 1024,
};
const validUnit = Object.keys(units);
const unit = maxLogSize.substr(maxLogSize.length - 1).toLocaleUpperCase();
const value = maxLogSize.substring(0, maxLogSize.length - 1).trim();
if (validUnit.indexOf(unit) < 0 || !Number.isInteger(Number(value))) {
throw Error(`maxLogSize: "${maxLogSize}" is invalid`);
} else {
return value * units[unit];
}
}
function adapter(configAdapter, config) {
Object.keys(configAdapter).forEach((key) => {
if (config[key]) {
config[key] = configAdapter[key](config[key]);
}
});
}
function fileAppenderAdapter(config) {
const configAdapter = {
maxLogSize: maxFileSizeUnitTransform
};
adapter(configAdapter, config);
}
function fileSyncAppenderAdapter(config) {
const configAdapter = {
maxLogSize: maxFileSizeUnitTransform
};
adapter(configAdapter, config);
}
const appenderAdapter = {
file: fileAppenderAdapter,
fileSync: fileSyncAppenderAdapter
};
module.exports = appenderAdapter;
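
A minimal usage sketch of the adapter (assuming the module above is saved as lib/appender-adapter.js and required directly; the config objects and sizes below are illustrative, not taken from the commit):

const appenderAdapter = require('./lib/appender-adapter');

// A plain integer byte count passes through the transform unchanged.
const numericConfig = { type: 'file', filename: 'app.log', maxLogSize: 1048576 };
appenderAdapter.file(numericConfig);
console.log(numericConfig.maxLogSize); // 1048576

// A string with a K, M or G suffix (case-insensitive) is converted to bytes in place.
const unitConfig = { type: 'file', filename: 'app.log', maxLogSize: '5M' };
appenderAdapter.file(unitConfig);
console.log(unitConfig.maxLogSize); // 5242880 (5 * 1024 * 1024)

// An unknown unit or a non-integer value throws.
try {
  appenderAdapter.file({ type: 'file', filename: 'app.log', maxLogSize: '10T' });
} catch (e) {
  console.log(e.message); // maxLogSize: "10T" is invalid
}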

lib/configuration.js

@@ -4,6 +4,7 @@ const util = require('util');
const path = require('path');
const levels = require('./levels');
const layouts = require('./layouts');
const appenderAdapter = require('./appender-adapter');
const debug = require('debug')('log4js:configuration');
let cluster;
@@ -69,6 +70,11 @@ class Configuration {
createAppender(name, config) {
const appenderModule = this.loadAppenderModule(config.type);
if (appenderAdapter[config.type]) {
appenderAdapter[config.type](config);
}
this.throwExceptionIf(
not(appenderModule),
`appender "${name}" is not valid (type "${config.type}" could not be found)`
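
With this hook in place, a unit-suffixed maxLogSize is converted to bytes before the appender is constructed. A minimal configuration sketch (the filename, size and levels are assumptions for illustration):

const log4js = require('log4js');

log4js.configure({
  appenders: {
    // '10M' reaches the file appender as 10 * 1024 * 1024 bytes;
    // a plain numeric byte count still works exactly as before.
    app: {
      type: 'file', filename: 'application.log', maxLogSize: '10M', backups: 3
    }
  },
  categories: {
    default: { appenders: ['app'], level: 'info' }
  }
});

log4js.getLogger().info('rolling by size, configured with a unit suffix');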


@@ -110,6 +110,49 @@ test('log4js fileAppender', (batch) => {
}, 100);
});
batch.test('with a max file size in unit mode and no backups', (t) => {
const testFile = path.join(__dirname, 'fa-maxFileSize-unit-test.log');
const logger = log4js.getLogger('max-file-size-unit');
t.tearDown(() => {
removeFile(testFile);
removeFile(`${testFile}.1`);
});
removeFile(testFile);
removeFile(`${testFile}.1`);
// log file of 1K = 1024 bytes maximum, no backups
log4js.configure({
appenders: {
file: {
type: 'file', filename: testFile, maxLogSize: '1K', backups: 0
}
},
categories: {
default: { appenders: ['file'], level: 'debug' }
}
});
const maxLine = 13;
for (let i = 0; i < maxLine; i++) {
logger.info('This is the first log message.');
}
logger.info('This is the second log message.');
// wait for the file system to catch up
setTimeout(() => {
fs.readFile(testFile, 'utf8', (err, fileContents) => {
t.include(fileContents, 'This is the second log message.');
t.equal(fileContents.indexOf('This is the first log message.'), -1);
fs.readdir(__dirname, (e, files) => {
const logFiles = files.filter(file => file.includes('fa-maxFileSize-unit-test.log'));
t.equal(logFiles.length, 2, 'should be 2 files');
t.end();
});
});
}, 100);
});
batch.test('with a max file size and 2 backups', (t) => {
const testFile = path.join(__dirname, 'fa-maxFileSize-with-backups-test.log');
const logger = log4js.getLogger('max-file-size-backups');
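
A rough size check behind the 13-line loop in the unit-mode test above (the layout length is an estimate, not taken from the commit):

// With the default basic layout each line looks roughly like
//   [2018-05-18T07:47:59.000] [INFO] max-file-size-unit - This is the first log message.
// which is on the order of 85 bytes including the newline. Thirteen of them come
// to roughly 1100 bytes, just over the 1K (1024 byte) limit, so the file rolls
// before the second message is written: the current log file holds only the
// second message and the earlier lines end up in fa-maxFileSize-unit-test.log.1.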


@@ -84,6 +84,52 @@ test('log4js fileSyncAppender', (batch) => {
t.end();
});
batch.test('with a max file size in unit mode and no backups', (t) => {
const testFile = path.join(__dirname, '/fa-maxFileSize-unit-sync-test.log');
const logger = log4js.getLogger('max-file-size-unit');
remove(testFile);
remove(`${testFile}.1`);
t.tearDown(() => {
remove(testFile);
remove(`${testFile}.1`);
});
// log file of 1K = 1024 bytes maximum, no backups
log4js.configure({
appenders: {
sync: {
type: 'fileSync', filename: testFile, maxLogSize: '1K', backups: 0
}
},
categories: { default: { appenders: ['sync'], level: 'debug' } }
});
const maxLine = 13;
for (let i = 0; i < maxLine; i++) {
logger.info('This is the first log message.');
}
logger.info('This is the second log message.');
t.test('log file should only contain the second message', (assert) => {
fs.readFile(testFile, 'utf8', (err, fileContents) => {
assert.include(fileContents, `This is the second log message.${EOL}`);
assert.equal(fileContents.indexOf('This is the first log message.'), -1);
assert.end();
});
});
t.test('there should be two test files', (assert) => {
fs.readdir(__dirname, (err, files) => {
const logFiles = files.filter(file => file.includes('fa-maxFileSize-unit-sync-test.log'));
assert.equal(logFiles.length, 2);
assert.end();
});
});
t.end();
});
batch.test('with a max file size and 2 backups', (t) => {
const testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-sync-test.log');
const logger = log4js.getLogger('max-file-size-backups');

types/log4js.d.ts (vendored, 4 changes)

@@ -107,7 +107,7 @@ export interface FileAppender {
// the path of the file where you want your logs written.
filename: string;
// the maximum size (in bytes) for the log file. If not specified, then no log rolling will happen.
- maxLogSize?: number;
+ maxLogSize?: number | string;
// (default value = 5) - the number of old log files to keep during log rolling.
backups?: number;
// defaults to basic layout
@@ -126,7 +126,7 @@ export interface SyncfileAppender {
// the path of the file where you want your logs written.
filename: string;
// the maximum size (in bytes) for the log file. If not specified, then no log rolling will happen.
- maxLogSize?: number;
+ maxLogSize?: number | string;
// (default value = 5) - the number of old log files to keep during log rolling.
backups?: number;
// defaults to basic layout