Ensure first-class support for transpiled & ts code in transports (#1381)

Andrés Correa Casablanca 2022-04-19 01:02:10 +02:00 committed by GitHub
parent be9d709171
commit 59a1f2742d
22 changed files with 611 additions and 36 deletions

4
.gitignore vendored

@@ -57,3 +57,7 @@ yarn.lock
package-lock.json
!test/fixtures/eval/node_modules
# Generated files
test/fixtures/ts/*js
!test/fixtures/ts/transpile.cjs

docs/transports.md

@@ -236,6 +236,22 @@ logger.info('hello world')
__NOTE: there is no "default" destination for a pipeline but
a terminating target, i.e. a `Writable` stream.__
### TypeScript compatibility
Pino provides basic support for transports written in TypeScript.
Ideally, they should be transpiled to ensure maximum compatibility, but
sometimes you might want to use tools such as TS-Node to execute your
TypeScript code without an explicit transpilation step.
You can use your TypeScript code without explicit transpilation (see the
example below), but there are some known caveats:
- For "pure" TypeScript code, ES imports are not supported yet (ES imports are
  only supported once the code is transpiled).
- Only TS-Node is supported for now; there is no TSM support.
- Running TypeScript transports through TS-Node on Windows is known to be
  problematic and is not officially supported yet.
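For example, a TypeScript transport and a program loading it through TS-Node
might look like the following sketch (the file names and the `prefix` option
are illustrative, not part of Pino's API):

```ts
// my-transport.ts: a hypothetical transport written in TypeScript
import { Writable } from 'stream'

export default (options: { prefix?: string }): Writable => {
  return new Writable({
    write (chunk, _enc, cb) {
      // Forward each log line to stdout, optionally prefixed
      process.stdout.write((options.prefix ?? '') + chunk.toString())
      cb()
    }
  })
}
```

```ts
// main.ts: run with `node -r ts-node/register main.ts`; the .ts worker is
// loaded through require() under the hood, so it must compile to CommonJS
import { join } from 'path'
import pino from 'pino'

const transport = pino.transport({
  target: join(__dirname, 'my-transport.ts'),
  options: { prefix: 'example: ' }
})
const logger = pino(transport)
logger.info('hello from a TypeScript transport')
```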
### Notable transports
#### `pino/file`

47
lib/transport-stream.js Normal file

@@ -0,0 +1,47 @@
'use strict'
const { realImport, realRequire } = require('real-require')
module.exports = loadTransportStreamBuilder
/**
* Loads & returns a function to build transport streams
* @param {string} target
* @returns {function(object): Promise<import('stream').Writable>}
* @throws {Error} In case the target module does not export a function
*/
async function loadTransportStreamBuilder (target) {
let fn
try {
const toLoad = 'file://' + target
if (toLoad.endsWith('.ts') || toLoad.endsWith('.cts')) {
// TODO: add support for the TSM modules loader ( https://github.com/lukeed/tsm ).
if (process[Symbol.for('ts-node.register.instance')]) {
realRequire('ts-node/register')
} else if (process.env && process.env.TS_NODE_DEV) {
realRequire('ts-node-dev')
}
// TODO: Support ES imports once tsc, tap & ts-node provide better compatibility guarantees.
fn = realRequire(decodeURIComponent(target))
} else {
fn = (await realImport(toLoad))
}
} catch (error) {
// See this PR for details: https://github.com/pinojs/thread-stream/pull/34
if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND')) {
fn = realRequire(target)
} else {
throw error
}
}
// Depending on how the default export is performed, and on how the code is
// transpiled, we may find cases of two nested "default" objects.
// See https://github.com/pinojs/pino/issues/1243#issuecomment-982774762
if (typeof fn === 'object') fn = fn.default
if (typeof fn === 'object') fn = fn.default
if (typeof fn !== 'function') throw Error('exported worker is not a function')
return fn
}
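As a hedged illustration (not part of the diff) of why the loader above unwraps `default` up to two times, consider the shapes a transpiled `export default` can take when loaded:

```ts
// Sketch only; the module shapes below are assumed examples (see pino#1243).
type Builder = (opts: object) => unknown
const build: Builder = (opts) => ({ opts })

const plainCjs = build                                  // module.exports = build
const esmInterop = { default: build }                   // typical esModuleInterop output
const doubleWrapped = { default: { default: build } }   // nested default produced by some setups

function unwrap (loaded: unknown): Builder {
  let fn: any = loaded
  if (typeof fn === 'object') fn = fn.default
  if (typeof fn === 'object') fn = fn.default
  if (typeof fn !== 'function') throw Error('exported worker is not a function')
  return fn
}

// All three shapes resolve to the same builder function
console.log([plainCjs, esmInterop, doubleWrapped].every(m => unwrap(m) === build)) // true
```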

lib/worker-pipeline.js

@@ -1,7 +1,7 @@
'use strict'
const EE = require('events')
const { realImport, realRequire } = require('real-require')
const loadTransportStreamBuilder = require('./transport-stream')
const { pipeline, PassThrough } = require('stream')
// This file is not checked by the code coverage tool,
@@ -11,18 +11,7 @@ const { pipeline, PassThrough } = require('stream')
module.exports = async function ({ targets }) {
const streams = await Promise.all(targets.map(async (t) => {
let fn
try {
const toLoad = 'file://' + t.target
fn = (await realImport(toLoad)).default
} catch (error) {
// See this PR for details: https://github.com/pinojs/thread-stream/pull/34
if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND')) {
fn = realRequire(t.target)
} else {
throw error
}
}
const fn = await loadTransportStreamBuilder(t.target)
const stream = await fn(t.options)
return stream
}))
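For context, here is a hedged sketch (not part of the diff) of the kind of `pipeline` configuration this worker serves; the transform target name is hypothetical, while `pino/file` acts as the terminating `Writable`:

```ts
import { join } from 'path'
import pino from 'pino'

// Each pipeline entry is resolved via loadTransportStreamBuilder, and the
// resulting streams are chained together; the last entry must terminate the
// pipeline with a Writable destination.
const transport = pino.transport({
  pipeline: [{
    target: join(__dirname, 'my-transform.ts')   // hypothetical Transform-returning transport
  }, {
    target: 'pino/file',
    options: { destination: 1 }                  // file descriptor 1 = stdout
  }]
})
pino(transport).info('hello')
```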

lib/worker.js

@@ -2,7 +2,7 @@
const pino = require('../pino.js')
const build = require('pino-abstract-transport')
const { realImport, realRequire } = require('real-require')
const loadTransportStreamBuilder = require('./transport-stream')
// This file is not checked by the code coverage tool,
// as it is not reliable.
@@ -11,18 +11,7 @@ const { realImport, realRequire } = require('real-require')
module.exports = async function ({ targets, levels }) {
targets = await Promise.all(targets.map(async (t) => {
let fn
try {
const toLoad = 'file://' + t.target
fn = (await realImport(toLoad)).default
} catch (error) {
// See this PR for details: https://github.com/pinojs/thread-stream/pull/34
if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND')) {
fn = realRequire(t.target)
} else {
throw error
}
}
const fn = await loadTransportStreamBuilder(t.target)
const stream = await fn(t.options)
return {
level: t.level,

package.json

@@ -23,12 +23,13 @@
"docs": "docsify serve",
"browser-test": "airtap --local 8080 test/browser*test.js",
"lint": "eslint .",
"test": "npm run lint && tap && jest test/jest && npm run test-types",
"test-ci": "npm run lint && tap --no-check-coverage --coverage-report=lcovonly && npm run test-types",
"test-ci-pnpm": "pnpm run lint && tap --no-coverage --no-check-coverage && pnpm run test-types",
"test-ci-yarn-pnp": "yarn run lint && tap --no-check-coverage --coverage-report=lcovonly",
"test": "npm run lint && npm run transpile && tap --ts && jest test/jest && npm run test-types",
"test-ci": "npm run lint && npm run transpile && tap --ts --no-check-coverage --coverage-report=lcovonly && npm run test-types",
"test-ci-pnpm": "pnpm run lint && npm run transpile && tap --ts --no-coverage --no-check-coverage && pnpm run test-types",
"test-ci-yarn-pnp": "yarn run lint && npm run transpile && tap --ts --no-check-coverage --coverage-report=lcovonly",
"test-types": "tsc && tsd && ts-node test/types/pino.ts",
"cov-ui": "tap --coverage-report=html",
"transpile": "node ./test/fixtures/ts/transpile.cjs",
"cov-ui": "tap --ts --coverage-report=html",
"bench": "node benchmarks/utils/runbench all",
"bench-basic": "node benchmarks/utils/runbench basic",
"bench-object": "node benchmarks/utils/runbench object",
@@ -67,7 +68,9 @@
},
"homepage": "http://getpino.io",
"devDependencies": {
"@types/flush-write-stream": "^1.0.0",
"@types/node": "^17.0.0",
"@types/tap": "^15.0.6",
"airtap": "4.0.4",
"benchmark": "^2.1.4",
"bole": "^4.0.0",
@@ -97,7 +100,7 @@
"tap": "^16.0.0",
"tape": "^5.0.0",
"through2": "^4.0.0",
"ts-node": "^10.3.0",
"ts-node": "^10.7.0",
"tsd": "^0.20.0",
"typescript": "^4.4.4",
"winston": "^3.3.3"

test/fixtures/transport-wrong-export-type.js

@@ -0,0 +1,3 @@
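// Deliberately exports an object instead of a transport builder function, so the
// transport tests can assert the 'exported worker is not a function' error.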
module.exports = {
completelyUnrelatedProperty: 'Just a very incorrect transport worker implementation'
}

test/fixtures/ts/to-file-transport-with-transform.ts

@@ -0,0 +1,18 @@
import * as fs from 'fs'
import { once } from 'events'
import { Transform } from 'stream'
async function run (opts: { destination?: fs.PathLike }): Promise<Transform> {
if (!opts.destination) throw new Error('kaboom')
const stream = fs.createWriteStream(opts.destination)
await once(stream, 'open')
const t = new Transform({
transform (chunk, enc, cb) {
setImmediate(cb, null, chunk.toString().toUpperCase())
}
})
t.pipe(stream)
return t
}
export default run

11
test/fixtures/ts/to-file-transport.ts vendored Normal file

@@ -0,0 +1,11 @@
import * as fs from 'fs'
import { once } from 'events'
async function run (opts: { destination?: fs.PathLike }): Promise<fs.WriteStream> {
if (!opts.destination) throw new Error('kaboom')
const stream = fs.createWriteStream(opts.destination, { encoding: 'utf8' })
await once(stream, 'open')
return stream
}
export default run

40
test/fixtures/ts/transpile.cjs vendored Executable file

@@ -0,0 +1,40 @@
#!/usr/bin/env node
const execa = require('execa')
const fs = require('fs')
const existsSync = fs.existsSync
const stat = fs.promises.stat
// Hardcoded parameters
const esVersions = ['es5', 'es6', 'es2017', 'esnext']
const filesToTranspile = ['to-file-transport.ts']
async function transpile () {
process.chdir(__dirname)
const runner = (process.env.npm_config_user_agent || '').match(/yarn/)
? 'yarn'
: 'npx'
for (const sourceFileName of filesToTranspile) {
const sourceStat = await stat(sourceFileName)
for (const esVersion of esVersions) {
const intermediateFileName = sourceFileName.replace(/\.ts$/, '.js')
const targetFileName = sourceFileName.replace(/\.ts$/, `.${esVersion}.cjs`)
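// Re-transpile only when the .cjs output is missing or older than the TypeScript source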
const shouldTranspile = !existsSync(targetFileName) || (await stat(targetFileName)).mtimeMs < sourceStat.mtimeMs
if (shouldTranspile) {
await execa(runner, ['tsc', '--target', esVersion, '--module', 'commonjs', sourceFileName])
await execa('mv', [intermediateFileName, targetFileName])
}
}
}
}
transpile().catch(err => {
process.exitCode = 1
throw err
})

test/fixtures/ts/transport-exit-immediately-with-async-dest.ts

@@ -0,0 +1,15 @@
import pino from '../../..'
import { join } from 'path'
const transport = pino.transport({
target: join(__dirname, 'to-file-transport-with-transform.ts'),
options: {
destination: process.argv[2]
}
})
const logger = pino(transport)
logger.info('Hello')
logger.info('World')
process.exit(0)

test/fixtures/ts/transport-exit-immediately.ts

@@ -0,0 +1,10 @@
import pino from '../../..'
const transport = pino.transport({
target: 'pino/file'
})
const logger = pino(transport)
logger.info('Hello')
process.exit(0)

test/fixtures/ts/transport-exit-on-ready.ts

@@ -0,0 +1,11 @@
import pino from '../../..'
const transport = pino.transport({
target: 'pino/file'
})
const logger = pino(transport)
transport.on('ready', function () {
logger.info('Hello')
process.exit(0)
})

8
test/fixtures/ts/transport-main.ts vendored Normal file

@@ -0,0 +1,8 @@
import { join } from 'path'
import pino from '../../..'
const transport = pino.transport({
target: join(__dirname, 'transport-worker.ts')
})
const logger = pino(transport)
logger.info('Hello')

test/fixtures/ts/transport-string-stdout.ts

@@ -0,0 +1,8 @@
import pino from '../../..'
const transport = pino.transport({
target: 'pino/file',
options: { destination: '1' }
})
const logger = pino(transport)
logger.info('Hello')

14
test/fixtures/ts/transport-worker.ts vendored Normal file

@@ -0,0 +1,14 @@
import { Writable } from 'stream'
export default (): Writable => {
const myTransportStream = new Writable({
autoDestroy: true,
write (chunk, _enc, cb) {
console.log(chunk.toString())
cb()
},
defaultEncoding: 'utf8'
})
return myTransportStream
}

4
test/helper.d.ts vendored Normal file

@@ -0,0 +1,4 @@
import { PathLike } from 'fs'
export declare function watchFileCreated(filename: PathLike): Promise<void>
export declare function watchForWrite(filename: PathLike, testString: string): Promise<void>

test/transport/big.test.js

@@ -2,7 +2,6 @@
const { test } = require('tap')
const { join } = require('path')
const { once } = require('events')
const { createReadStream } = require('fs')
const { promisify } = require('util')
const execa = require('execa')
@@ -16,10 +15,17 @@ const sleep = promisify(setTimeout)
test('eight million lines', async ({ equal, comment }) => {
const destination = file()
const child = execa(process.argv[0], [join(__dirname, '..', 'fixtures', 'transport-many-lines.js'), destination])
await execa(process.argv[0], [join(__dirname, '..', 'fixtures', 'transport-many-lines.js'), destination])
if (process.platform !== 'win32') {
try {
await execa('sync') // Wait for the file to be written to disk
} catch {
// Just a fallback, this should be unreachable
}
}
await sleep(1000) // It seems that sync is not enough (even in POSIX systems)
await once(child, 'exit')
await sleep(1000) // wait for the file to be written
const toWrite = 8 * 1000000
let count = 0
await pipeline(createReadStream(destination), split(), new Writable({

test/transport/core.test.js

@@ -80,6 +80,32 @@ test('pino.transport errors if file does not exists', ({ plan, pass }) => {
})
})
test('pino.transport errors if transport worker module does not export a function', ({ plan, equal }) => {
// TODO: add case for non-pipelined single target (needs changes in thread-stream)
plan(2)
const manyTargetsInstance = pino.transport({
targets: [{
level: 'info',
target: join(__dirname, '..', 'fixtures', 'transport-wrong-export-type.js')
}, {
level: 'info',
target: join(__dirname, '..', 'fixtures', 'transport-wrong-export-type.js')
}]
})
manyTargetsInstance.on('error', function (e) {
equal(e.message, 'exported worker is not a function')
})
const pipelinedInstance = pino.transport({
pipeline: [{
target: join(__dirname, '..', 'fixtures', 'transport-wrong-export-type.js')
}]
})
pipelinedInstance.on('error', function (e) {
equal(e.message, 'exported worker is not a function')
})
})
test('pino.transport with esm', async ({ same, teardown }) => {
const destination = file()
const transport = pino.transport({

236
test/transport/core.test.ts Normal file

@@ -0,0 +1,236 @@
import * as os from 'os'
import { join } from 'path'
import { once } from 'events'
import fs from 'fs'
import { watchFileCreated } from '../helper'
import { test } from 'tap'
import pino from '../../'
import * as url from 'url'
import { default as strip } from 'strip-ansi'
import execa from 'execa'
import writer from 'flush-write-stream'
if (process.platform === 'win32') {
// TODO: Implement .ts files loading support for Windows
process.exit()
}
const readFile = fs.promises.readFile
const { pid } = process
const hostname = os.hostname()
test('pino.transport with file', async ({ same, teardown }) => {
const destination = join(
os.tmpdir(),
'_' + Math.random().toString(36).substr(2, 9)
)
const transport = pino.transport({
target: join(__dirname, '..', 'fixtures', 'ts', 'to-file-transport.ts'),
options: { destination }
})
teardown(transport.end.bind(transport))
const instance = pino(transport)
instance.info('hello')
await watchFileCreated(destination)
const result = JSON.parse(await readFile(destination, { encoding: 'utf8' }))
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: 'hello'
})
})
test('pino.transport with file (no options + error handling)', async ({ equal }) => {
const transport = pino.transport({
target: join(__dirname, '..', 'fixtures', 'ts', 'to-file-transport.ts')
})
const [err] = await once(transport, 'error')
equal(err.message, 'kaboom')
})
test('pino.transport with file URL', async ({ same, teardown }) => {
const destination = join(
os.tmpdir(),
'_' + Math.random().toString(36).substr(2, 9)
)
const transport = pino.transport({
target: url.pathToFileURL(join(__dirname, '..', 'fixtures', 'ts', 'to-file-transport.ts')).href,
options: { destination }
})
teardown(transport.end.bind(transport))
const instance = pino(transport)
instance.info('hello')
await watchFileCreated(destination)
const result = JSON.parse(await readFile(destination, { encoding: 'utf8' }))
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: 'hello'
})
})
test('pino.transport with two files', async ({ same, teardown }) => {
const dest1 = join(
os.tmpdir(),
'_' + Math.random().toString(36).substr(2, 9)
)
const dest2 = join(
os.tmpdir(),
'_' + Math.random().toString(36).substr(2, 9)
)
const transport = pino.transport({
targets: [{
level: 'info',
target: join(__dirname, '..', 'fixtures', 'ts', 'to-file-transport.ts'),
options: { destination: dest1 }
}, {
level: 'info',
target: join(__dirname, '..', 'fixtures', 'ts', 'to-file-transport.ts'),
options: { destination: dest2 }
}]
})
teardown(transport.end.bind(transport))
const instance = pino(transport)
instance.info('hello')
await Promise.all([watchFileCreated(dest1), watchFileCreated(dest2)])
const result1 = JSON.parse(await readFile(dest1, { encoding: 'utf8' }))
delete result1.time
same(result1, {
pid,
hostname,
level: 30,
msg: 'hello'
})
const result2 = JSON.parse(await readFile(dest2, { encoding: 'utf8' }))
delete result2.time
same(result2, {
pid,
hostname,
level: 30,
msg: 'hello'
})
})
test('no transport.end()', async ({ same, teardown }) => {
const destination = join(
os.tmpdir(),
'_' + Math.random().toString(36).substr(2, 9)
)
const transport = pino.transport({
target: join(__dirname, '..', 'fixtures', 'ts', 'to-file-transport.ts'),
options: { destination }
})
const instance = pino(transport)
instance.info('hello')
await watchFileCreated(destination)
const result = JSON.parse(await readFile(destination, { encoding: 'utf8' }))
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: 'hello'
})
})
test('autoEnd = false', async ({ equal, same, teardown }) => {
const destination = join(
os.tmpdir(),
'_' + Math.random().toString(36).substr(2, 9)
)
const count = process.listenerCount('exit')
const transport = pino.transport({
target: join(__dirname, '..', 'fixtures', 'ts', 'to-file-transport.ts'),
options: { destination },
worker: { autoEnd: false }
})
teardown(transport.end.bind(transport))
await once(transport, 'ready')
const instance = pino(transport)
instance.info('hello')
await watchFileCreated(destination)
equal(count, process.listenerCount('exit'))
const result = JSON.parse(await readFile(destination, { encoding: 'utf8' }))
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: 'hello'
})
})
test('stdout in worker', async ({ not }) => {
let actual = ''
const child = execa(process.argv[0], ['-r', 'ts-node/register', join(__dirname, '..', 'fixtures', 'ts', 'transport-main.ts')])
child.stdout?.pipe(writer((s, enc, cb) => {
actual += s
cb()
}))
await once(child, 'close')
not(strip(actual).match(/Hello/), null)
})
test('log and exit on ready', async ({ not }) => {
let actual = ''
const child = execa(process.argv[0], ['-r', 'ts-node/register', join(__dirname, '..', 'fixtures', 'ts', 'transport-exit-on-ready.ts')])
child.stdout?.pipe(writer((s, enc, cb) => {
actual += s
cb()
}))
await once(child, 'close')
not(strip(actual).match(/Hello/), null)
})
test('log and exit before ready', async ({ not }) => {
let actual = ''
const child = execa(process.argv[0], ['-r', 'ts-node/register', join(__dirname, '..', 'fixtures', 'ts', 'transport-exit-immediately.ts')])
child.stdout?.pipe(writer((s, enc, cb) => {
actual += s
cb()
}))
await once(child, 'close')
not(strip(actual).match(/Hello/), null)
})
test('log and exit before ready with async dest', async ({ not }) => {
const destination = join(
os.tmpdir(),
'_' + Math.random().toString(36).substr(2, 9)
)
const child = execa(process.argv[0], ['-r', 'ts-node/register', join(__dirname, '..', 'fixtures', 'ts', 'transport-exit-immediately-with-async-dest.ts'), destination])
await once(child, 'exit')
const actual = await readFile(destination, { encoding: 'utf8' })
not(strip(actual).match(/HELLO/), null)
not(strip(actual).match(/WORLD/), null)
})
test('string integer destination', async ({ not }) => {
let actual = ''
const child = execa(process.argv[0], ['-r', 'ts-node/register', join(__dirname, '..', 'fixtures', 'ts', 'transport-string-stdout.ts')])
child.stdout?.pipe(writer((s, enc, cb) => {
actual += s
cb()
}))
await once(child, 'close')
not(strip(actual).match(/Hello/), null)
})

test/transport/core.transpiled.test.ts

@@ -0,0 +1,116 @@
import * as os from 'os'
import { join } from 'path'
import { once } from 'events'
import fs from 'fs'
import { watchFileCreated } from '../helper'
import { test } from 'tap'
import pino from '../../'
import * as url from 'url'
import { default as strip } from 'strip-ansi'
import execa from 'execa'
import writer from 'flush-write-stream'
const readFile = fs.promises.readFile
const { pid } = process
const hostname = os.hostname()
// A subset of the test from core.test.js, we don't need all of them to check for compatibility
function runTests(esVersion: string): void {
test(`(ts -> ${esVersion}) pino.transport with file`, async ({ same, teardown }) => {
const destination = join(
os.tmpdir(),
'_' + Math.random().toString(36).substr(2, 9)
)
const transport = pino.transport({
target: join(__dirname, '..', 'fixtures', 'ts', `to-file-transport.${esVersion}.cjs`),
options: { destination }
})
teardown(transport.end.bind(transport))
const instance = pino(transport)
instance.info('hello')
await watchFileCreated(destination)
const result = JSON.parse(await readFile(destination, { encoding: 'utf8' }))
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: 'hello'
})
})
test(`(ts -> ${esVersion}) pino.transport with file URL`, async ({ same, teardown }) => {
const destination = join(
os.tmpdir(),
'_' + Math.random().toString(36).substr(2, 9)
)
const transport = pino.transport({
target: url.pathToFileURL(join(__dirname, '..', 'fixtures', 'ts', `to-file-transport.${esVersion}.cjs`)).href,
options: { destination }
})
teardown(transport.end.bind(transport))
const instance = pino(transport)
instance.info('hello')
await watchFileCreated(destination)
const result = JSON.parse(await readFile(destination, { encoding: 'utf8' }))
delete result.time
same(result, {
pid,
hostname,
level: 30,
msg: 'hello'
})
})
test(`(ts -> ${esVersion}) pino.transport with two files`, async ({ same, teardown }) => {
const dest1 = join(
os.tmpdir(),
'_' + Math.random().toString(36).substr(2, 9)
)
const dest2 = join(
os.tmpdir(),
'_' + Math.random().toString(36).substr(2, 9)
)
const transport = pino.transport({
targets: [{
level: 'info',
target: join(__dirname, '..', 'fixtures', 'ts', `to-file-transport.${esVersion}.cjs`),
options: { destination: dest1 }
}, {
level: 'info',
target: join(__dirname, '..', 'fixtures', 'ts', `to-file-transport.${esVersion}.cjs`),
options: { destination: dest2 }
}]
})
teardown(transport.end.bind(transport))
const instance = pino(transport)
instance.info('hello')
await Promise.all([watchFileCreated(dest1), watchFileCreated(dest2)])
const result1 = JSON.parse(await readFile(dest1, { encoding: 'utf8' }))
delete result1.time
same(result1, {
pid,
hostname,
level: 30,
msg: 'hello'
})
const result2 = JSON.parse(await readFile(dest2, { encoding: 'utf8' }))
delete result2.time
same(result2, {
pid,
hostname,
level: 30,
msg: 'hello'
})
})
}
runTests('es5')
runTests('es6')
runTests('es2017')
runTests('esnext')

tsconfig.json

@@ -5,6 +5,7 @@
"module": "commonjs",
"noEmit": true,
"strict": true,
"esModuleInterop": true,
},
"exclude": [
"./test/types/*.test-d.ts",