Last active
June 15, 2016 23:05
-
-
Save 32curveball/a1b8ea3b03836267e8d8ee15dbc56c0e to your computer and use it in GitHub Desktop.
after commenting out the sincedb_path and start_position
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
[root@monitor01 conf.d]# /opt/logstash/bin/logstash -f 11-logstash.conf | |
Error: Expected one of #, => at line 105, column 19 (byte 3393) after filter { | |
if [type] == "syslog" { | |
# Split the syslog part and Cisco tag out of the message | |
grok { | |
match => ["message", "%{CISCO_TAGGED_SYSLOG} %{GREEDYDATA:cisco_message}"] | |
} | |
# Parse the syslog severity and facility | |
syslog_pri { } | |
# Parse the date from the "timestamp" field to the "@timestamp" field | |
# 2015-05-01T00:00:00+02:00 is ISO8601 | |
grok { | |
match => ["message", "%{TIMESTAMP_ISO8601:timestamp}"] | |
} | |
date { | |
#2015-05-01T00:00:00+02:00 | |
match => ["timestamp", | |
"yyyy-MM-dd'T'HH:mm:ssZ" | |
# "yyyy MM dd HH:mm:ss", | |
] | |
#timezone => "Europe/Paris" | |
} | |
# Clean up redundant fields if parsing was successful | |
if "_grokparsefailure" not in [tags] { | |
mutate { | |
rename => ["cisco_message", "message"] | |
remove_field => ["timestamp"] | |
} | |
} | |
# Extract fields from the each of the detailed message types | |
grok { | |
match => [ | |
"message", "%{CISCOFW106001}", | |
"message", "%{CISCOFW106006_106007_106010}", | |
"message", "%{CISCOFW106014}", | |
"message", "%{CISCOFW106015}", | |
"message", "%{CISCOFW106021}", | |
"message", "%{CISCOFW106023}", | |
"message", "%{CISCOFW106100}", | |
"message", "%{CISCOFW110002}", | |
"message", "%{CISCOFW302010}", | |
"message", "%{CISCOFW302013_302014_302015_302016}", | |
"message", "%{CISCOFW302020_302021}", | |
"message", "%{CISCOFW305011}", | |
"message", "%{CISCOFW313001_313004_313008}", | |
"message", "%{CISCOFW313005}", | |
"message", "%{CISCOFW402117}", | |
"message", "%{CISCOFW402119}", | |
"message", "%{CISCOFW419001}", | |
"message", "%{CISCOFW419002}", | |
"message", "%{CISCOFW500004}", | |
"message", "%{CISCOFW602303_602304}", | |
"message", "%{CISCOFW710001_710002_710003_710005_710006}", | |
"message", "%{CISCOFW713172}", | |
"message", "%{CISCOFW733100}" | |
] | |
} | |
} | |
#Geolocate logs that have src_ip and if that src_ip is a non-RFC1918 address or APIPA address | |
if [src_ip] and [src_ip] !~ "(^127\.0\.0\.1)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^192\.168\.)|(^169\.254\.)" { | |
geoip { | |
database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat" | |
source => "src_ip" | |
target => "src_geoip" | |
} | |
#Delete 0,0 in src_geoip.location if equal to 0,0 | |
if ([src_geoip.location] and [src_geoip.location] =~ "0,0") { | |
mutate { | |
replace => [ "src_geoip.location", "" ] | |
} | |
} | |
} | |
#Geolocate logs that have dst_ip and if that dst_ip is a non-RFC1918 address or APIPA address | |
if [dst_ip] and [dst_ip] !~ "(^127\.0\.0\.1)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^192\.168\.)|(^169\.254\.)" { | |
geoip { | |
database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat" | |
source => "dst_ip" | |
target => "dst_geoip" | |
} | |
#Delete 0,0 in dst_geoip.location if equal to 0,0 | |
if ([dst_geoip.location] and [dst_geoip.location] =~ "0,0") { | |
mutate { | |
replace => [ "dst_ip.location", "" ] | |
} | |
} | |
} | |
#end filter block | |
output { | |
elasticsearch | |
You may be interested in the '--configtest' flag which you can | |
use to validate logstash's configuration before you choose | |
to restart a running system. |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment