# pino-abstract-transport

Write Pino transports easily.

## Install

```
npm i pino-abstract-transport
```
## Usage

```js
import build from 'pino-abstract-transport'

export default async function (opts) {
  return build(async function (source) {
    for await (const obj of source) {
      console.log(obj)
    }
  })
}
```
or in CommonJS and streams:

```js
'use strict'

const build = require('pino-abstract-transport')

module.exports = function (opts) {
  return build(function (source) {
    source.on('data', function (obj) {
      console.log(obj)
    })
  })
}
```
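Either flavor can then be loaded by pino as a transport. A sketch of the wiring, assuming the module above is saved as `my-transport.js`:

```js
const pino = require('pino')
const { join } = require('path')

// Hypothetical module exporting the build() factory shown above.
// pino resolves the target from a worker thread, so an absolute
// path (or a published package name) is the safest choice.
const transport = pino.transport({
  target: join(__dirname, 'my-transport.js')
})

const logger = pino(transport)
logger.info('hello world')
```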
### Typescript

Install the type definitions for node. Make sure the major version of the type definitions matches the node version you are using.

```
npm i -D @types/node@16
```
## API

### build(fn, opts) => Stream

Creates a [split2](https://www.npmjs.com/package/split2) instance and returns it.
This same instance is also passed to the given function, which is called
synchronously.

If `opts.enablePipelining` is `true`, `pino-abstract-transport` will
wrap the split2 instance and the returned stream using
[duplexify](https://www.npmjs.com/package/duplexify), so they can be
concatenated into multiple transports.
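Because `build()` returns the stream itself, you can exercise a transport without pino by writing newline-delimited JSON to it, which is convenient in tests. A minimal sketch:

```js
const build = require('pino-abstract-transport')

const stream = build(function (source) {
  source.on('data', function (obj) {
    console.log('received', obj)
  })
})

// Feed the writable side the same newline-delimited JSON pino would produce.
stream.write('{"level":30,"time":1650000000000,"msg":"hello"}\n')
stream.end()
```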
#### Events emitted

In addition to all events emitted by a [Readable](https://nodejs.org/api/stream.html#class-streamreadable)
stream, it emits the following events:

* `unknown` emitted when an unparsable line is found; both the line and an optional error are emitted.
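For instance, a transport could surface malformed lines instead of dropping them silently. A minimal sketch:

```js
const build = require('pino-abstract-transport')

const stream = build(function (source) {
  source.on('data', function (obj) {
    console.log(obj)
  })
})

stream.on('unknown', function (line, error) {
  // `line` is the raw string that could not be parsed;
  // `error` is the parse error, when one was thrown.
  console.error('skipping unparsable line:', line, error)
})
```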
#### Options

* `parse` an option to change the data format passed to the build function. When this option is set to `lines`, the data is passed as a string, otherwise the data is passed as an object. Default: `undefined`.

* `close(err, cb)` a function that is called to shutdown the transport. It's called both on error and non-error shutdowns. It can also return a promise; in this case discard the `cb` argument (see the sketch after this list).

* `parseLine(line)` a function that is used to parse each line received from `pino`.

* `expectPinoConfig` a boolean that indicates if the transport expects pino to add some of its configuration to the stream. Default: `false`.
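As a rough sketch of how `parse` and `close` can work together (the `opts.destination` option and the file handling are assumptions for illustration, not part of the library):

```js
'use strict'

const build = require('pino-abstract-transport')
const fs = require('fs')

module.exports = function (opts) {
  // `opts.destination` is a hypothetical option for this sketch.
  const destination = fs.createWriteStream(opts.destination)
  return build(function (source) {
    source.on('data', function (line) {
      // With parse: 'lines' each chunk is the raw log line string.
      destination.write(line + '\n')
    })
  }, {
    parse: 'lines',
    close (err, cb) {
      // Flush and close the file before the transport fully shuts down.
      destination.end(() => cb(err))
    }
  })
}
```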
### Example

You can allow a custom `parseLine` from users while providing a simple and safe default:

```js
'use strict'

const build = require('pino-abstract-transport')

function defaultParseLine (line) {
  const obj = JSON.parse(line)
  // property foo will be added on each line
  obj.foo = 'bar'
  return obj
}

module.exports = function (opts) {
  const parseLine = typeof opts.parseLine === 'function' ? opts.parseLine : defaultParseLine
  return build(function (source) {
    source.on('data', function (obj) {
      console.log(obj)
    })
  }, {
    parseLine: parseLine
  })
}
```
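A consumer could then pass their own `parseLine` when creating the stream, for example by using it directly as a pino destination (a sketch; `./my-transport.js` is a hypothetical path to the module above):

```js
const pino = require('pino')
const buildTransport = require('./my-transport.js') // hypothetical module above

const stream = buildTransport({
  parseLine (line) {
    // Custom parsing: tag each record instead of using the default.
    const obj = JSON.parse(line)
    obj.source = 'custom'
    return obj
  }
})

// A transport stream can be used directly as a pino destination.
const logger = pino(stream)
logger.info('hello')
```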
### Stream concatenation

You can pipeline multiple transports:

```js
const build = require('pino-abstract-transport')
const { Transform, pipeline } = require('stream')

function buildTransform () {
  return build(function (source) {
    return new Transform({
      objectMode: true,
      autoDestroy: true,
      transform (line, enc, cb) {
        line.service = 'bob'
        cb(null, JSON.stringify(line))
      }
    })
  }, { enablePipelining: true })
}

function buildDestination () {
  return build(function (source) {
    source.on('data', function (obj) {
      console.log(obj)
    })
  })
}

pipeline(process.stdin, buildTransform(), buildDestination(), function (err) {
  console.log('pipeline completed!', err)
})
```
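The same concatenation can also be driven by pino itself through its `pipeline` transport option; a sketch with assumed module paths:

```js
const pino = require('pino')
const { join } = require('path')

const transport = pino.transport({
  pipeline: [
    // Hypothetical module using enablePipelining, as above.
    { target: join(__dirname, 'my-transform.js') },
    // pino/file ships with pino and writes to stdout by default.
    { target: 'pino/file' }
  ]
})

pino(transport).info('hello')
```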
### Pino Configuration

Setting `expectPinoConfig` to `true` will make the transport wait for pino to send its configuration before starting to process logs. It will add `levels`, `messageKey` and `errorKey` to the stream.

When used with an incompatible version of pino, the stream will immediately error.
```js
import build from 'pino-abstract-transport'

export default function (opts) {
  return build(async function (source) {
    for await (const obj of source) {
      console.log(`[${source.levels.labels[obj.level]}]: ${obj[source.messageKey]}`)
    }
  }, {
    expectPinoConfig: true
  })
}
```
## License

MIT