chore(merge): manual merge from v2.5.0

Gareth Jones 2018-01-18 08:19:15 +11:00
commit b44794437d
14 changed files with 6871 additions and 685 deletions

README.md

@@ -17,13 +17,16 @@ Out of the box it supports the following features:
* SMTP appender
* GELF appender
* Loggly appender
* Logstash UDP appender
* Logstash (UDP and HTTP) appender
* logFaces (UDP and HTTP) appender
* multiprocess appender (useful when you've got multiple servers)
* a logger for connect/express servers
* configurable log message layout/patterns
* different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
## Getting help
Having problems? Jump on the [slack](https://join.slack.com/t/log4js-node/shared_invite/enQtMjk5OTcxODMwNDA1LTk5ZTA0YjcwNWRiYmFkNGQyZTkyZTYzYTFiYTE2NTRhNzFmNmY3OTdjZTY3MWM3M2RlMGQxN2ZlMmY4ZDFmZWY) channel, or create an issue. If you want to help out with the development, the slack channel is a good place to go as well.
## installation
```bash
@@ -89,7 +92,8 @@ configure({
```
## Contributing
Contributions welcome, but take a look at the [rules](https://log4js-node.github.io/log4js-node/contrib-guidelines.html) first.
We're always looking for people to help out. Jump on [slack](https://join.slack.com/t/log4js-node/shared_invite/enQtMjk5OTcxODMwNDA1LTk5ZTA0YjcwNWRiYmFkNGQyZTkyZTYzYTFiYTE2NTRhNzFmNmY3OTdjZTY3MWM3M2RlMGQxN2ZlMmY4ZDFmZWY) and discuss what you want to do. Also, take a look at the [rules](https://log4js-node.github.io/log4js-node/contrib-guidelines.html) before submitting a pull request.
## License

docs/appenders.md

@@ -30,6 +30,7 @@ The following appenders are included with log4js. Some require extra dependencie
* [logFaces-UDP](logFaces-UDP.md)
* [loggly](loggly.md)
* [logLevelFilter](logLevelFilter.md)
* [logstashHTTP](logstashHTTP.md)
* [logstashUDP](logstashUDP.md)
* [mailgun](mailgun.md)
* [multiFile](multiFile.md)
@@ -40,6 +41,7 @@ The following appenders are included with log4js. Some require extra dependencie
* [smtp](smtp.md)
* [stderr](stderr.md)
* [stdout](stdout.md)
* [rabbitmq](rabbitmq.md)
## Other Appenders

docs/logstashHTTP.md (new file, 33 lines)

@@ -0,0 +1,33 @@
# logstash Appender (HTTP)
The logstash appenders send NDJSON formatted log events to [logstash](https://www.elastic.co/products/logstash) receivers. This appender uses HTTP to send the events (there is another logstash appender that uses [UDP](logstashUDP.md)). You will need to include [axios](https://www.npmjs.com/package/axios) in your dependencies to use this appender.
## Configuration
* `type` - `logstashHTTP`
* `url` - `string` - the URL to POST log events to (e.g. `http://localhost:9200/_bulk`, as in the example below)
* `application` - `string` (optional) - used to identify your application's logs
* `logChannel` - `string` (optional) - also used to identify your application's logs, at a more specific level
* `logType` - `string` (optional) - used for the `type` field in the logstash data
* `timeout` - `integer` (optional, defaults to 5000ms) - the timeout for the HTTP request
This appender will also pick up Logger context values from the events, and add them to the `context` field of the logstash event. See the example below for more details.
## Example
```javascript
log4js.configure({
  appenders: {
    logstash: { type: 'logstashHTTP', url: 'http://localhost:9200/_bulk', application: 'logstash-log4js', logType: 'application', logChannel: 'node' }
  },
  categories: {
    default: { appenders: [ 'logstash' ], level: 'info' }
  }
});
const logger = log4js.getLogger();
logger.addContext('requestId', '123');
logger.info('some interesting log message');
logger.error('something has gone wrong');
```
This example will result in two log events being sent to your `localhost:9200`. Both events will have a `context.requestId` property with a value of `123`.
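For reference, the appender posts each log event to the configured `url` as two NDJSON lines: an Elasticsearch-style bulk index header followed by the event body. The `info` call above produces a payload roughly like the following (the timestamp and numeric level are illustrative):
```
{"index":{"_index":"logstash-log4js","_type":"application"}}
{"message":"some interesting log message","context":{"requestId":"123"},"level":200,"level_name":"INFO","channel":"node","datetime":"2018-01-18T08:19:15.000Z","extra":{}}
```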

docs/rabbitmq.md (new file, 41 lines)

@@ -0,0 +1,41 @@
# RabbitMQ Appender
Push log events to a [RabbitMQ](https://www.rabbitmq.com/) message broker. You will need to include the [amqplib](https://www.npmjs.com/package/amqplib) package in your application's dependencies to use this appender.
## Configuration
* `type` - `rabbitmq`
* `host` - `string` (optional, defaults to `127.0.0.1`) - the location of the rabbitmq server
* `port` - `integer` (optional, defaults to `5672`) - the port the rabbitmq server is listening on
* `username` - `string` (optional, defaults to `guest`) - username to use when authenticating connection to rabbitmq
* `password` - `string` (optional, defaults to `guest`) - password to use when authenticating connection to rabbitmq
* `routing_key` - `string` (optional, defaults to `logstash`) - the routing key to use when publishing log messages
* `durable` - `boolean` (optional, defaults to `false`) - if `true`, the exchange will survive a broker restart
* `exchange` - `string` - the exchange to publish log messages to
* `mq_type` - `string` - the exchange type (e.g. `direct`)
* `layout` - `object` (optional, defaults to `messagePassThroughLayout`) - the layout to use for log events (see [layouts](layouts.md)).
The appender publishes each log event to the configured exchange with the given routing key, following RabbitMQ's routing model.
## Example
```javascript
log4js.configure({
  appenders: {
    mq: {
      type: 'rabbitmq',
      host: '127.0.0.1',
      port: 5672,
      username: 'guest',
      password: 'guest',
      routing_key: 'logstash',
      exchange: 'exchange_logs',
      mq_type: 'direct',
      durable: true
    }
  },
  categories: { default: { appenders: ['mq'], level: 'info' } }
});
```
This configuration will push log messages to the RabbitMQ broker on `127.0.0.1:5672`.
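To verify that events are arriving, you can bind a queue to the same exchange and routing key and read the messages back. This is a minimal consumer sketch using amqplib's promise API; the connection URL matches the defaults above, and the queue name `log4js-demo` is only an illustration, not part of the appender:
```javascript
const amqp = require('amqplib');

// Minimal consumer sketch: binds a throwaway queue to the exchange used in
// the appender config above and prints every log message it receives.
amqp.connect('amqp://guest:guest@127.0.0.1:5672')
  .then(conn => conn.createChannel())
  .then((ch) => {
    return ch.assertExchange('exchange_logs', 'direct', { durable: true })
      .then(() => ch.assertQueue('log4js-demo', { durable: false }))
      .then(q => ch.bindQueue(q.queue, 'exchange_logs', 'logstash').then(() => q))
      .then(q => ch.consume(q.queue, (msg) => {
        console.log('received:', msg.content.toString());
        ch.ack(msg);
      }));
  })
  .catch(console.error);
```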

examples/logstashHTTP.js (new file, 26 lines)

@@ -0,0 +1,26 @@
const log4js = require('../lib/log4js');

log4js.configure({
  appenders: {
    console: {
      type: 'console'
    },
    logstash: {
      url: 'http://172.17.0.5:9200/_bulk',
      type: 'logstashHTTP',
      logType: 'application',
      logChannel: 'node',
      application: 'logstash-log4js',
      layout: {
        type: 'pattern',
        pattern: '%m'
      }
    }
  },
  categories: {
    default: { appenders: ['console', 'logstash'], level: 'info' }
  }
});

const logger = log4js.getLogger('myLogger');
logger.info('Test log message %s', 'arg1', 'arg2');

examples/rabbitmq-appender.js (new executable file, 49 lines)

@@ -0,0 +1,49 @@
// Note: the rabbitmq appender needs the amqplib package installed to work.
const log4js = require('../lib/log4js');

log4js.configure({
  appenders: {
    out: {
      type: 'console'
    },
    file: {
      type: 'dateFile',
      filename: 'logs/log.txt',
      pattern: 'yyyyMMdd',
      alwaysIncludePattern: false
    },
    mq: {
      type: 'rabbitmq',
      host: '127.0.0.1',
      port: 5672,
      username: 'guest',
      password: 'guest',
      routing_key: 'logstash',
      exchange: 'exchange_logs',
      mq_type: 'direct',
      durable: true,
      layout: {
        type: 'pattern',
        pattern: '%d{yyyy-MM-dd hh:mm:ss:SSS}#%p#%m'
      }
    }
  },
  categories: {
    default: { appenders: ['out'], level: 'info' },
    dateFile: { appenders: ['file'], level: 'info' },
    rabbitmq: { appenders: ['mq'], level: 'info' }
  }
});

const log = log4js.getLogger('console');
const logRabbitmq = log4js.getLogger('rabbitmq');

function doTheLogging(x) {
  log.info('Logging something %d', x);
  logRabbitmq.info('Logging something %d', x);
}

for (let i = 0; i < 500; i += 1) {
  doTheLogging(i);
}

lib/appenders/logstashHTTP.js (new file)

@@ -0,0 +1,91 @@
/**
 * logstashHTTP appender sends NDJSON formatted log events to logstash HTTP receivers.
 *
 * It requires 'axios', see 'https://www.npmjs.com/package/axios'.
 *
 * Make sure your project has the relevant dependency installed before using this appender.
 */
/* eslint global-require:0 */
'use strict';
const util = require('util');
const axios = require('axios');
/**
 *
 * For HTTP (browsers or node.js) use the following configuration params:
 * {
 *   "type": "logstashHTTP",          // must be present for instantiation
 *   "application": "logstash-test",  // name of the application
 *   "logType": "application",        // type of the application
 *   "logChannel": "test",            // channel of the application
 *   "url": "http://lfs-server/_bulk", // logstash receiver servlet URL
 * }
 */
function logstashHTTPAppender(config) {
  const sender = axios.create({
    baseURL: config.url,
    timeout: config.timeout || 5000,
    headers: { 'Content-Type': 'application/x-ndjson' },
    withCredentials: true,
  });

  return function log(event) {
    const logstashEvent = [
      {
        index: {
          _index: config.application,
          _type: config.logType,
        },
      },
      {
        message: format(event.data), // eslint-disable-line
        context: event.context,
        level: event.level.level / 100,
        level_name: event.level.levelStr,
        channel: config.logChannel,
        datetime: (new Date(event.startTime)).toISOString(),
        extra: {},
      },
    ];
    const logstashJSON = `${JSON.stringify(logstashEvent[0])}\n${JSON.stringify(logstashEvent[1])}\n`;

    // send to server
    sender.post('', logstashJSON)
      .catch((error) => {
        if (error.response) {
          console.error(`log4js.logstashHTTP Appender error posting to ${config.url}: ${error.response.status} - ${error.response.data}`);
          return;
        }
        console.error(`log4js.logstashHTTP Appender error: ${error.message}`);
      });
  };
}
function configure(config) {
  return logstashHTTPAppender(config);
}

// format the log data in the same way util.format would
function format(logData) {
  const data = Array.isArray(logData)
    ? logData
    : Array.prototype.slice.call(arguments);
  return util.format.apply(util, wrapErrorsWithInspect(data));
}

// make sure Error objects are rendered with their stack traces included
function wrapErrorsWithInspect(items) {
  return items.map((item) => {
    if ((item instanceof Error) && item.stack) {
      return {
        inspect: function () {
          return `${util.format(item)}\n${item.stack}`;
        }
      };
    }
    return item;
  });
}
module.exports.configure = configure;

lib/appenders/rabbitmq.js (new file, 61 lines)

@@ -0,0 +1,61 @@
'use strict';
const amqplib = require('amqplib');
function rabbitmqAppender(config, layout) {
  const host = config.host || '127.0.0.1';
  const port = config.port || 5672;
  const username = config.username || 'guest';
  const password = config.password || 'guest';
  const exchange = config.exchange || '';
  const type = config.mq_type || '';
  const durable = config.durable || false;
  const routingKey = config.routing_key || 'logstash';
  const con = {
    protocol: 'amqp',
    hostname: host,
    port: port,
    username: username,
    password: password,
    locale: 'en_US',
    frameMax: 0,
    heartbeat: 0,
    vhost: '/',
    routing_key: routingKey,
    exchange: exchange,
    mq_type: type,
    durable: durable,
  };
  const clientconn = amqplib.connect(con);
  // amqplib's connection promise does not provide a publish method, so fall
  // back to one that opens a channel, asserts the exchange and publishes the
  // message on it.
  clientconn.publish = clientconn.publish || ((client, message) => {
    client.then((conn) => {
      const rn = conn.createChannel().then((ch) => {
        const ok = ch.assertExchange(exchange, type, { durable: durable });
        return ok.then(() => {
          ch.publish(exchange, routingKey, Buffer.from(message));
          return ch.close();
        });
      });
      return rn;
    }).catch(console.error);
  });

  function log(loggingEvent) {
    const message = layout(loggingEvent);
    clientconn.publish(clientconn, message);
  }

  log.shutdown = function () {
    // the connection is a promise; wait for it to resolve before closing
    Promise.resolve(clientconn).then(conn => conn.close()).catch(console.error);
  };

  return log;
}
function configure(config, layouts) {
  let layout = layouts.messagePassThroughLayout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return rabbitmqAppender(config, layout);
}
module.exports.configure = configure;

lib/log4js.js

@@ -49,6 +49,9 @@ function sendLogEventToAppender(logEvent) {
* @return {Logger} instance of logger for the category
*/
function getLogger(category) {
  if (!enabled) {
    configure(process.env.LOG4JS_CONFIG || defaultConfig);
  }

  return new Logger(category || 'default');
}
@@ -132,7 +135,3 @@ const log4js = {
};
module.exports = log4js;
// set ourselves up
if (process.env.LOG4JS_CONFIG) {
  configure(process.env.LOG4JS_CONFIG);
}
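The effect of this change is that configuration becomes lazy: rather than configuring at require time when `LOG4JS_CONFIG` is set, the first `getLogger()` call configures log4js from `process.env.LOG4JS_CONFIG` if present, or from the built-in default config otherwise. A minimal sketch of the resulting behaviour:
```javascript
const log4js = require('log4js');

// No explicit configure() call here: the first getLogger() triggers
// configuration from process.env.LOG4JS_CONFIG (if set) or the default config.
const logger = log4js.getLogger('lazy');
logger.info('configured on first use');
```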

package-lock.json (generated, 6997 lines): file diff suppressed because it is too large.

package.json

@@ -1,8 +1,11 @@
{
  "name": "log4js",
  "version": "2.4.1",
  "version": "2.5.0",
  "description": "Port of Log4js to work with node.",
  "homepage": "https://log4js-node.github.io/log4js-node/",
  "files": [
    "lib"
  ],
  "keywords": [
    "logging",
    "log",
@@ -38,7 +41,7 @@
    "lib": "lib"
  },
  "dependencies": {
    "circular-json": "^0.4.0",
    "circular-json": "^0.5.1",
    "date-format": "^1.2.0",
    "debug": "^3.1.0",
    "semver": "^5.3.0",
@@ -64,7 +67,8 @@
    "nodemailer": "^2.5.0",
    "redis": "^2.7.1",
    "slack-node": "~0.2.0",
    "axios": "^0.15.3"
    "axios": "^0.15.3",
    "amqplib": "^0.5.2"
  },
  "browser": {
    "os": false


@@ -37,7 +37,7 @@ test('log4js configure', (batch) => {
}
};
sandbox.require(
const log4js = sandbox.require(
'../../lib/log4js',
{
requires: {
@@ -46,6 +46,8 @@ test('log4js configure', (batch) => {
}
);
log4js.getLogger('test-logger');
delete process.env.LOG4JS_CONFIG;
t.equal(fileRead, 1, 'should load the specified local config file');


@@ -0,0 +1,108 @@
'use strict';

const test = require('tap').test;
const sandbox = require('sandboxed-module');

function setupLogging(category, options) {
  const fakeAxios = {
    create: function (config) {
      this.config = config;
      return {
        post: function (emptyString, event) {
          fakeAxios.args = [emptyString, event];
          return {
            catch: function (cb) {
              fakeAxios.errorCb = cb;
            }
          };
        }
      };
    }
  };

  const fakeConsole = {
    error: function (msg) {
      this.msg = msg;
    }
  };

  const log4js = sandbox.require('../../lib/log4js', {
    requires: {
      axios: fakeAxios
    },
    globals: {
      console: fakeConsole
    }
  });

  options.type = 'logstashHTTP';
  log4js.configure({
    appenders: { http: options },
    categories: { default: { appenders: ['http'], level: 'trace' } }
  });

  return {
    logger: log4js.getLogger(category),
    fakeAxios: fakeAxios,
    fakeConsole: fakeConsole
  };
}

test('logstashappender', (batch) => {
  batch.test('when using HTTP receivers', (t) => {
    const setup = setupLogging('myCategory', {
      application: 'logstash-sample',
      logType: 'application',
      logChannel: 'sample',
      url: 'http://localhost/receivers/rx1'
    });

    t.test('axios should be configured', (assert) => {
      assert.equal(setup.fakeAxios.config.baseURL, 'http://localhost/receivers/rx1');
      assert.equal(setup.fakeAxios.config.timeout, 5000);
      assert.equal(setup.fakeAxios.config.withCredentials, true);
      assert.same(setup.fakeAxios.config.headers, { 'Content-Type': 'application/x-ndjson' });
      assert.end();
    });

    setup.logger.addContext('foo', 'bar');
    setup.logger.addContext('bar', 'foo');
    setup.logger.warn('Log event #1');

    t.test('an event should be sent', (assert) => {
      const packet = setup.fakeAxios.args[1].split('\n');
      const eventHeader = JSON.parse(packet[0]);
      const eventBody = JSON.parse(packet[1]);
      assert.equal(eventHeader.index._index, 'logstash-sample');
      assert.equal(eventHeader.index._type, 'application');
      assert.equal(eventBody.channel, 'sample');
      assert.equal(eventBody.message, 'Log event #1');
      assert.equal(eventBody.level_name, 'WARN');
      assert.equal(eventBody.context.foo, 'bar');
      assert.equal(eventBody.context.bar, 'foo');

      // Assert timestamp, up to hours resolution.
      const date = new Date(eventBody.datetime);
      assert.equal(
        date.toISOString().substring(0, 14),
        new Date().toISOString().substring(0, 14)
      );
      assert.end();
    });

    t.test('errors should be sent to console.error', (assert) => {
      setup.fakeAxios.errorCb({ response: { status: 500, data: 'oh no' } });
      assert.equal(
        setup.fakeConsole.msg,
        'log4js.logstashHTTP Appender error posting to http://localhost/receivers/rx1: 500 - oh no'
      );
      setup.fakeAxios.errorCb(new Error('oh dear'));
      assert.equal(setup.fakeConsole.msg, 'log4js.logstashHTTP Appender error: oh dear');
      assert.end();
    });

    t.end();
  });
  batch.end();
});


@@ -0,0 +1,119 @@
'use strict';

const test = require('tap').test;
const sandbox = require('sandboxed-module');

function setupLogging(category, options) {
  const fakeRabbitmq = {
    msgs: [],
    connect: function (conn) {
      this.port = conn.port;
      this.host = conn.hostname;
      this.username = conn.username;
      this.password = conn.password;
      this.routing_key = conn.routing_key;
      this.exchange = conn.exchange;
      this.mq_type = conn.mq_type;
      this.durable = conn.durable;
      return {
        publish: function (client, message) {
          fakeRabbitmq.msgs.push(message);
        }
      };
    }
  };

  const fakeConsole = {
    errors: [],
    error: function (msg) {
      this.errors.push(msg);
    }
  };

  const log4js = sandbox.require('../../lib/log4js', {
    requires: {
      amqplib: fakeRabbitmq,
    },
    globals: {
      console: fakeConsole
    }
  });

  log4js.configure({
    appenders: { rabbitmq: options },
    categories: { default: { appenders: ['rabbitmq'], level: 'trace' } }
  });

  return {
    logger: log4js.getLogger(category),
    fakeRabbitmq: fakeRabbitmq,
    fakeConsole: fakeConsole
  };
}

test('log4js rabbitmqAppender', (batch) => {
  batch.test('rabbitmq setup', (t) => {
    const result = setupLogging('rabbitmq setup', {
      host: '123.123.123.123',
      port: 5672,
      username: 'guest',
      password: 'guest',
      routing_key: 'logstash',
      exchange: 'exchange_logs',
      mq_type: 'direct',
      durable: true,
      type: 'rabbitmq',
      layout: {
        type: 'pattern',
        pattern: 'cheese %m'
      }
    });

    result.logger.info('Log event #1');

    t.test('rabbitmq credentials should match', (assert) => {
      assert.equal(result.fakeRabbitmq.host, '123.123.123.123');
      assert.equal(result.fakeRabbitmq.port, 5672);
      assert.equal(result.fakeRabbitmq.username, 'guest');
      assert.equal(result.fakeRabbitmq.password, 'guest');
      assert.equal(result.fakeRabbitmq.routing_key, 'logstash');
      assert.equal(result.fakeRabbitmq.exchange, 'exchange_logs');
      assert.equal(result.fakeRabbitmq.mq_type, 'direct');
      assert.equal(result.fakeRabbitmq.durable, true);
      assert.equal(result.fakeRabbitmq.msgs.length, 1, 'should be one message only');
      assert.equal(result.fakeRabbitmq.msgs[0], 'cheese Log event #1');
      assert.end();
    });

    t.end();
  });

  batch.test('default values', (t) => {
    const setup = setupLogging('defaults', {
      type: 'rabbitmq'
    });
    setup.logger.info('just testing');

    t.test('should use localhost', (assert) => {
      assert.equal(setup.fakeRabbitmq.host, '127.0.0.1');
      assert.equal(setup.fakeRabbitmq.port, 5672);
      assert.equal(setup.fakeRabbitmq.username, 'guest');
      assert.equal(setup.fakeRabbitmq.password, 'guest');
      assert.equal(setup.fakeRabbitmq.exchange, '');
      assert.equal(setup.fakeRabbitmq.mq_type, '');
      assert.equal(setup.fakeRabbitmq.durable, false);
      assert.equal(setup.fakeRabbitmq.routing_key, 'logstash');
      assert.end();
    });

    t.test('should use message pass through layout', (assert) => {
      assert.equal(setup.fakeRabbitmq.msgs.length, 1);
      assert.equal(setup.fakeRabbitmq.msgs[0], 'just testing');
      assert.end();
    });

    t.end();
  });

  batch.end();
});