pino-elasticsearch.js (forked from pinojs/pino-elasticsearch)
#! /usr/bin/env node
'use strict'
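
// Loads pino logs into Elasticsearch: reads newline-delimited JSON log
// lines, parses them, and indexes each document, using the bulk API when
// writes can be batched.
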
const minimist = require('minimist')
const Writable = require('readable-stream').Writable
const elasticsearch = require('elasticsearch')
const AwsElasticsearch = require('aws-es')
const Parse = require('fast-json-parse')
const split = require('split2')
const pump = require('pump')
const fs = require('fs')
const path = require('path')

// Return an Elasticsearch client: the official `elasticsearch` client by
// default, or an `aws-es` client when an AWS endpoint is configured.
function getClient (opts) {
  if (!opts['aws-endpoint']) {
    return new elasticsearch.Client({
      host: opts.host + ':' + opts.port,
      log: {
        level: opts['trace-level'] || 'error'
      }
    })
  } else {
    return new AwsElasticsearch({
      accessKeyId: opts['aws-access-key'],
      secretAccessKey: opts['aws-secret-key'],
      service: 'es',
      region: opts['aws-region'],
      host: opts['aws-endpoint']
    })
  }
}

// Build the transport: a split2 stream parses each incoming line as JSON,
// and a Writable ships the parsed documents to Elasticsearch.
function pinoElasticSearch (opts) {
  const splitter = split(function (line) {
    var parsed = new Parse(line)
    if (parsed.err) {
      // not valid JSON: surface the raw line to the consumer and skip it
      this.emit('unknown', line, parsed.err)
      return
    }
    var value = parsed.value
    // normalize the pino timestamp to an ISO string for Elasticsearch
    value.time = (new Date(value.time)).toISOString()
    return value
  })
  const index = opts.index || 'pino'
  const type = opts.type || 'log'
  const writable = new Writable({
    objectMode: true,
    highWaterMark: opts['bulk-size'] || 500,
    writev: function (chunks, cb) {
      // the bulk API expects an action line followed by the document for
      // every entry, so the body holds two elements per buffered chunk
      const docs = new Array(chunks.length * 2)
      for (var i = 0; i < docs.length; i++) {
        if (i % 2 === 0) {
          // add the header
          docs[i] = { index: { _index: index, _type: type } }
        } else {
          // add the chunk
          docs[i] = chunks[Math.floor(i / 2)].chunk
        }
      }
      const client = getClient(opts)
      client.bulk({
        body: docs
      }, function (err, result) {
        if (!err) {
          const items = result.items
          for (var i = 0; i < items.length; i++) {
            // depending on the Elasticsearch version, the bulk response might
            // contain fields 'create' or 'index' (> ES 5.x)
            const create = items[i].index || items[i].create
            splitter.emit('insert', create, chunks[i].chunk)
          }
        } else {
          splitter.emit('insertError', err)
        }
        // skip error and continue
        cb()
      })
    },
    write: function (body, enc, cb) {
      // single-document path, used when there is nothing to batch
      const obj = { index, type, body }
      const client = getClient(opts)
      client.index(obj, function (err, data) {
        if (!err) {
          splitter.emit('insert', data, body)
        } else {
          splitter.emit('insertError', err)
        }
        // skip error and continue
        cb()
      })
    }
  })
  // forward everything the splitter produces into the Elasticsearch writer
  pump(splitter, writable)
  return splitter
}

module.exports = pinoElasticSearch
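
// Usage sketch: the stream returned by pinoElasticSearch() is the same one
// the CLI pipes stdin into, so it can also be used programmatically. The
// option names below match the CLI flags parsed at the bottom of this file.
//
//   const pinoElasticSearch = require('./pino-elasticsearch')
//   const stream = pinoElasticSearch({ host: 'localhost', port: 9200, index: 'pino' })
//   stream.on('insert', function (result, doc) { console.log('indexed', doc) })
//   stream.on('insertError', function (err) { console.error(err) })
//   process.stdin.pipe(stream)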

// CLI entry point: print usage or version when asked, otherwise pipe stdin
// into the transport
function start (opts) {
  if (opts.help) {
    console.log(fs.readFileSync(path.join(__dirname, './usage.txt'), 'utf8'))
    return
  }
  if (opts.version) {
    console.log('pino-elasticsearch', require('./package.json').version)
    return
  }
  pump(process.stdin, pinoElasticSearch(opts))
}

if (require.main === module) {
  start(minimist(process.argv.slice(2), {
    alias: {
      version: 'v',
      help: 'h',
      host: 'H',
      port: 'p',
      index: 'i',
      'bulk-size': 'b',
      'trace-level': 'l'
    },
    default: {
      host: 'localhost',
      port: 9200
    }
  }))
}
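
// CLI sketch, using the flags defined above (the `node app.js` producer is
// only an example):
//   node app.js | ./pino-elasticsearch.js --host localhost --port 9200 --index pino --bulk-size 500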