
I'm trying to parse some logs with grok in Node.js, but I'm running into trouble because the log lines don't all follow the same format.

Say my log file looks like this:

[2017-02-03 19:15:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager) 
[2017-02-03 19:25:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager) 
[2017-02-03 19:26:20,605] INFO Rolled new log segment for 'omega-replica-sync-dev-8' in 21 ms. (kafka.log.Log) 
[2017-02-03 19:26:20,605] INFO Scheduling log segment 1 for log omega-replica-sync-dev-8 for deletion. (kafka.log.Log) 
[2017-02-03 19:27:20,606] INFO Deleting segment 1 from log omega-replica-sync-dev-8. (kafka.log.Log) 

My current Node code looks like this:

'use strict'; 

var nodegrok = require('node-grok'); 

var msg = '[2017-02-03 19:15:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)\n[2017-02-03 19:25:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)\n[2017-02-03 19:26:20,605] INFO Rolled new log segment for \'omega-replica-sync-dev-8\' in 21 ms. (kafka.log.Log)\n[2017-02-03 19:26:20,605] INFO Scheduling log segment 1 for log omega-replica-sync-dev-8 for deletion. (kafka.log.Log)\n[2017-02-03 19:27:20,606] INFO Deleting segment 1 from log omega-replica-sync-dev-8. (kafka.log.Log)'; 

console.log('message: ', msg); 

// Expected line shape: [timestamp] LEVEL [component]: free text 
var p2 = '\\[%{TIMESTAMP_ISO8601:timestamp}\\] %{LOGLEVEL:level} \\[%{DATA:message1}\\]: %{GREEDYDATA:message2}'; 

// Load the default grok pattern set and compile the expression once, outside the loop. 
var patterns = nodegrok.loadDefaultSync(); 
var pattern = patterns.createPattern(p2); 

var lines = msg.toString().split('\n'); 

for (var i = 0; i < lines.length; i++) { 
    console.log('line [i]:', lines[i]); 
    console.log('pattern:', pattern.parseSync(lines[i])); 
} 

But the last three lines come out as null, because they don't contain the bracketed third part that the pattern expects.

line [i]: [2017-02-03 19:15:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager) 
pattern: { timestamp: '2017-02-03 19:15:51,112', 
    level: 'INFO', 
    message1: 'Group Metadata Manager on Broker 1', 
    message2: 'Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)' } 
line [i]: [2017-02-03 19:25:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager) 
pattern: { timestamp: '2017-02-03 19:25:51,112', 
    level: 'INFO', 
    message1: 'Group Metadata Manager on Broker 1', 
    message2: 'Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)' } 
line [i]: [2017-02-03 19:26:20,605] INFO Rolled new log segment for 'omega-replica-sync-dev-8' in 21 ms. (kafka.log.Log) 
pattern: null 
line [i]: [2017-02-03 19:26:20,605] INFO Scheduling log segment 1 for log omega-replica-sync-dev-8 for deletion. (kafka.log.Log) 
pattern: null 
line [i]: [2017-02-03 19:27:20,606] INFO Deleting segment 1 from log omega-replica-sync-dev-8. (kafka.log.Log) 
pattern: null 
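
Since the only difference between the lines that match and the ones that don't is the bracketed "[...]:" segment, one untested alternative is to make that segment optional inside a single grok expression by wrapping it in a plain regex non-capturing group. This is only a sketch, and it assumes node-grok passes inline regex through to the compiled pattern unchanged:

'use strict'; 

var patterns = require('node-grok').loadDefaultSync(); 

// The "(?: ... )?" wrapper is ordinary regex syntax, not a grok macro: it makes 
// the "[component]: " part optional, so lines without it still match and simply 
// have no message1 value in the result. 
var p = '\\[%{TIMESTAMP_ISO8601:timestamp}\\] %{LOGLEVEL:level} ' + 
        '(?:\\[%{DATA:message1}\\]: )?%{GREEDYDATA:message2}'; 
var pattern = patterns.createPattern(p); 

console.log(pattern.parseSync('[2017-02-03 19:27:20,606] INFO Deleting segment 1 from log omega-replica-sync-dev-8. (kafka.log.Log)')); 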

So how can I handle lines that come in different formats when parsing with grok?

Answer


So here's one way to do it that I got working... basically check with an if statement whether the first pattern matches, and if it doesn't, evaluate the second one. But what if there are 6 potential log formats? Do I then have to nest 6 if statements? That doesn't sound like an efficient way to do it... is there a better way?

'use strict'; 

var nodegrok = require('node-grok'); 

var msg = '[2017-02-03 19:15:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)\n[2017-02-03 19:25:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)\n[2017-02-03 19:26:20,605] INFO Rolled new log segment for \'omega-replica-sync-dev-8\' in 21 ms. (kafka.log.Log)\n[2017-02-03 19:26:20,605] INFO Scheduling log segment 1 for log omega-replica-sync-dev-8 for deletion. (kafka.log.Log)\n[2017-02-03 19:27:20,606] INFO Deleting segment 1 from log omega-replica-sync-dev-8. (kafka.log.Log)'; 

console.log('message: ', msg); 

// Two candidate formats: with and without the bracketed "[component]:" part. 
var p = '\\[%{TIMESTAMP_ISO8601:timestamp}\\] %{LOGLEVEL:level} \\[%{DATA:message1}\\]: %{GREEDYDATA:message2}'; 
var p2 = '\\[%{TIMESTAMP_ISO8601:timestamp}\\] %{LOGLEVEL:level} %{GREEDYDATA:message2}'; 

// Compile both patterns once, outside the loop. 
var patterns = nodegrok.loadDefaultSync(); 
var pattern = patterns.createPattern(p); 
var pattern2 = patterns.createPattern(p2); 

var lines = msg.toString().split('\n'); 

for (var i = 0; i < lines.length; i++) { 
    console.log('line [i]:', lines[i]); 

    // Try the more specific pattern first; fall back to the simpler one on null. 
    var result = pattern.parseSync(lines[i]); 
    if (result == null) { 
        console.log('patternf:', pattern2.parseSync(lines[i])); 
    } else { 
        console.log('pattern:', result); 
    } 
}
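
If the number of formats keeps growing, one way to avoid nesting an if per format is to keep the compiled patterns in an array, ordered from most to least specific, and return the result of the first one that matches. The parseLine helper below is just a sketch of that idea built on the same node-grok calls used above, not an existing library feature:

'use strict'; 

var patterns = require('node-grok').loadDefaultSync(); 

// Candidate grok expressions, most specific first; add more formats here as needed. 
var candidates = [ 
    '\\[%{TIMESTAMP_ISO8601:timestamp}\\] %{LOGLEVEL:level} \\[%{DATA:message1}\\]: %{GREEDYDATA:message2}', 
    '\\[%{TIMESTAMP_ISO8601:timestamp}\\] %{LOGLEVEL:level} %{GREEDYDATA:message2}' 
].map(function (expr) { 
    return patterns.createPattern(expr); 
}); 

// Return the parsed object from the first pattern that matches, or null if none do. 
function parseLine(line) { 
    for (var i = 0; i < candidates.length; i++) { 
        var result = candidates[i].parseSync(line); 
        if (result != null) { 
            return result; 
        } 
    } 
    return null; 
} 

var lines = [ 
    '[2017-02-03 19:15:51,112] INFO [Group Metadata Manager on Broker 1]: Removed 0 expired offsets in 0 milliseconds. (kafka.coordinator.GroupMetadataManager)', 
    '[2017-02-03 19:27:20,606] INFO Deleting segment 1 from log omega-replica-sync-dev-8. (kafka.log.Log)' 
]; 

lines.forEach(function (line) { 
    console.log('parsed:', parseLine(line)); 
});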