refactor: appenders/clustered.js

Author: e-cloud
Date: 2016-07-15 19:47:55 +08:00
parent 311dea74af
commit 1ab318c991


@@ -1,7 +1,5 @@
 "use strict";
-var cluster = require('cluster');
-var log4js = require('../log4js');
+const cluster = require('cluster');
+const log4js = require('../log4js');
 /**
  * Takes a loggingEvent object, returns string representation of it.
@@ -9,8 +7,8 @@ var log4js = require('../log4js');
 function serializeLoggingEvent(loggingEvent) {
   // JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
   // The following allows us to serialize errors correctly.
-  for (var i = 0; i < loggingEvent.data.length; i++) {
-    var item = loggingEvent.data[i];
+  for (let i = 0; i < loggingEvent.data.length; i++) {
+    const item = loggingEvent.data[i];
     // Validate that we really are in this case
     if (item && item.stack && JSON.stringify(item) === '{}') {
       loggingEvent.data[i] = { stack: item.stack };
@@ -32,24 +30,20 @@ function serializeLoggingEvent(loggingEvent) {
  * processing by log4js internals.
  */
 function deserializeLoggingEvent(loggingEventString) {
-  var loggingEvent;
+  let loggingEvent;
   try {
     loggingEvent = JSON.parse(loggingEventString);
     loggingEvent.startTime = new Date(loggingEvent.startTime);
     loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
     // Unwrap serialized errors
-    for (var i = 0; i < loggingEvent.data.length; i++) {
-      var item = loggingEvent.data[i];
+    for (let i = 0; i < loggingEvent.data.length; i++) {
+      const item = loggingEvent.data[i];
       if (item && item.stack) {
         loggingEvent.data[i] = item.stack;
       }
     }
   } catch (e) {
     // JSON.parse failed, just log the contents probably a naughty.
     loggingEvent = {
       startTime: new Date(),
@@ -73,14 +67,11 @@ function deserializeLoggingEvent(loggingEventString) {
  * Or better use `configure(config, options)`
  */
 function createAppender(config) {
   if (cluster.isMaster) {
-    var masterAppender = function (loggingEvent) {
+    const masterAppender = loggingEvent => {
       if (config.actualAppenders) {
-        var size = config.actualAppenders.length;
-        for (var i = 0; i < size; i++) {
+        const size = config.actualAppenders.length;
+        for (let i = 0; i < size; i++) {
           if (
             !config.appenders[i].category ||
             config.appenders[i].category === loggingEvent.categoryName
@@ -94,11 +85,10 @@ function createAppender(config) {
     };
     // Listen on new workers
-    cluster.on('fork', function (worker) {
-      worker.on('message', function (message) {
+    cluster.on('fork', worker => {
+      worker.on('message', message => {
         if (message.type && message.type === '::log-message') {
-          var loggingEvent = deserializeLoggingEvent(message.event);
+          const loggingEvent = deserializeLoggingEvent(message.event);
           // Adding PID metadata
           loggingEvent.pid = worker.process.pid;
@@ -111,43 +101,36 @@ function createAppender(config) {
           masterAppender(loggingEvent);
         }
       });
     });
     return masterAppender;
-  } else {
-    return function (loggingEvent) {
-      // If inside the worker process, then send the logger event to master.
-      if (cluster.isWorker) {
-        // console.log("worker " + cluster.worker.id + " is sending message");
-        process.send({ type: '::log-message', event: serializeLoggingEvent(loggingEvent) });
-      }
-    };
   }
+  return loggingEvent => {
+    // If inside the worker process, then send the logger event to master.
+    if (cluster.isWorker) {
+      // console.log("worker " + cluster.worker.id + " is sending message");
+      process.send({ type: '::log-message', event: serializeLoggingEvent(loggingEvent) });
+    }
+  };
 }
 function configure(config, options) {
   if (config.appenders && cluster.isMaster) {
-    var size = config.appenders.length;
+    const size = config.appenders.length;
     config.actualAppenders = new Array(size);
-    for (var i = 0; i < size; i++) {
+    for (let i = 0; i < size; i++) {
       log4js.loadAppender(config.appenders[i].type);
       config.actualAppenders[i] = log4js.appenderMakers[config.appenders[i].type](
         config.appenders[i],
         options
       );
     }
   }
   return createAppender(config);
 }
-exports.appender = createAppender;
-exports.configure = configure;
+module.exports.appender = createAppender;
+module.exports.configure = configure;
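
For orientation, a minimal usage sketch (not part of this commit) showing how the refactored module is driven end to end. It assumes the legacy array-style log4js configuration that configure(config, options) iterates over; the 'clustered' appender type name, the 'console'/'file' child appenders, and the log4js require are assumptions, not facts confirmed by this diff.

// Sketch only: wiring the clustered appender under the assumed legacy config format.
const cluster = require('cluster');
const log4js = require('log4js');

log4js.configure({
  appenders: [
    {
      type: 'clustered',             // assumed to resolve to appenders/clustered.js
      appenders: [
        { type: 'console' },                                      // no category: receives every event
        { type: 'file', filename: 'http.log', category: 'http' }  // category-filtered, per masterAppender
      ]
    }
  ]
});

const logger = log4js.getLogger('http');

if (cluster.isMaster) {
  // Master: configure() builds config.actualAppenders and cluster.on('fork') wires the IPC listener.
  cluster.fork();
  logger.info('master started');
} else {
  // Worker: the returned appender serializes the event and process.send()s a '::log-message' to the master.
  logger.info('hello from worker', new Error('stack survives serializeLoggingEvent'));
}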