Created
June 15, 2016 21:59
-
-
Save 32curveball/3ee9666c7b89d8346922b96204bca236 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
input {
  file {
    path => ["/var/log/network.log"]
    # sincedb_path must point at a writable FILE, not a directory.
    # The original value "/var/log/logstash" is a directory, which makes the
    # file input abort at startup with "Invalid argument - Invalid file"
    # (the exact error shown in the pasted run below).
    sincedb_path => "/var/log/logstash/sincedb-network"
    # Read the whole file on first run, not just new lines appended after start.
    start_position => "beginning"
    type => "syslog"
    # Tag so the fingerprint step downstream can target ASA flow records.
    tags => [ "asa_log" ]
  }
} #end of input block
filter {
  if [type] == "syslog" {
    # Split the syslog header and Cisco tag out of the raw message;
    # the remainder of the line lands in cisco_message.
    grok {
      match => ["message", "%{CISCO_TAGGED_SYSLOG} %{GREEDYDATA:cisco_message}"]
    }
    # Parse the syslog severity and facility from the PRI field
    syslog_pri { }
    # Extract the ISO8601 timestamp (e.g. 2015-05-01T00:00:00+02:00)
    # into a temporary "timestamp" field.
    grok {
      match => ["message", "%{TIMESTAMP_ISO8601:timestamp}"]
    }
    # Promote the parsed timestamp to @timestamp
    date {
      #2015-05-01T00:00:00+02:00
      match => ["timestamp",
      "yyyy-MM-dd'T'HH:mm:ssZ"
      # "yyyy MM dd HH:mm:ss",
      ]
      #timezone => "Europe/Paris"
    }
    # Clean up redundant fields if parsing was successful
    if "_grokparsefailure" not in [tags] {
      mutate {
        rename => ["cisco_message", "message"]
        remove_field => ["timestamp"]
      }
    }
    # Extract fields from each of the detailed ASA message types
    grok {
      match => [
        "message", "%{CISCOFW106001}",
        "message", "%{CISCOFW106006_106007_106010}",
        "message", "%{CISCOFW106014}",
        "message", "%{CISCOFW106015}",
        "message", "%{CISCOFW106021}",
        "message", "%{CISCOFW106023}",
        "message", "%{CISCOFW106100}",
        "message", "%{CISCOFW110002}",
        "message", "%{CISCOFW302010}",
        "message", "%{CISCOFW302013_302014_302015_302016}",
        "message", "%{CISCOFW302020_302021}",
        "message", "%{CISCOFW305011}",
        "message", "%{CISCOFW313001_313004_313008}",
        "message", "%{CISCOFW313005}",
        "message", "%{CISCOFW402117}",
        "message", "%{CISCOFW402119}",
        "message", "%{CISCOFW419001}",
        "message", "%{CISCOFW419002}",
        "message", "%{CISCOFW500004}",
        "message", "%{CISCOFW602303_602304}",
        "message", "%{CISCOFW710001_710002_710003_710005_710006}",
        "message", "%{CISCOFW713172}",
        "message", "%{CISCOFW733100}"
      ]
    }
  }
  # Geolocate logs that have src_ip when that src_ip is a public
  # (non-RFC1918, non-APIPA, non-loopback) address.
  if [src_ip] and [src_ip] !~ "(^127\.0\.0\.1)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^192\.168\.)|(^169\.254\.)" {
    geoip {
      database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat"
      source => "src_ip"
      target => "src_geoip"
    }
    # Blank out src_geoip location when GeoIP returned the 0,0 placeholder.
    # NOTE: nested fields use Logstash [field][subfield] syntax; the original
    # dotted form [src_geoip.location] never matches anything.
    if ([src_geoip][location] and [src_geoip][location] =~ "0,0") {
      mutate {
        replace => [ "[src_geoip][location]", "" ]
      }
    }
  }
  # Geolocate logs that have dst_ip when that dst_ip is a public
  # (non-RFC1918, non-APIPA, non-loopback) address.
  if [dst_ip] and [dst_ip] !~ "(^127\.0\.0\.1)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^192\.168\.)|(^169\.254\.)" {
    geoip {
      database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat"
      source => "dst_ip"
      target => "dst_geoip"
    }
    # Blank out dst_geoip location when GeoIP returned the 0,0 placeholder.
    # Fixes two bugs in the original: dotted field syntax, and a copy-paste
    # typo that targeted "dst_ip.location" instead of the dst_geoip target.
    if ([dst_geoip][location] and [dst_geoip][location] =~ "0,0") {
      mutate {
        replace => [ "[dst_geoip][location]", "" ]
      }
    }
  }
  # Take the 4-tuple of source address, destination address, destination port,
  # and protocol and SHA1-hash it to fingerprint the flow. This is a useful
  # way to do top-N terms queries on flows, not just on one field.
  if "asa_log" in [tags] and [SourceAddress] and [DestinationAddress] {
    fingerprint {
      concatenate_sources => true
      method => "SHA1"
      key => "logstash"
      source => [ "SourceAddress", "DestinationAddress", "DestinationPort", "IPProtocol" ]
    }
  }
} #end filter block
output {
  # Ship events to the local Elasticsearch cluster.
  elasticsearch {
    # Logstash 1.x "node" protocol: join the ES cluster as a client node.
    cluster => "elasticsearch"
    host => "monitor01.etrigue.com"
    node_name => "monitor01"
    protocol => "node"
  }
} #end output block
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
[root@monitor01 conf.d]# /opt/logstash/bin/logstash -f 11-logstash.conf | |
Using milestone 2 input plugin 'file'. This plugin should be stable, but if you see strange behavior, please let us know! For more information on plugin milestones, see http://logstash.net/docs/1.4.5/plugin-milestones {:level=>:warn} | |
Using milestone 1 filter plugin 'syslog_pri'. This plugin should work, but would benefit from use by folks like you. Please let us know if you find bugs or have suggestions on how to improve this plugin. For more information on plugin milestones, see http://logstash.net/docs/1.4.5/plugin-milestones {:level=>:warn} | |
Using milestone 1 filter plugin 'fingerprint'. This plugin should work, but would benefit from use by folks like you. Please let us know if you find bugs or have suggestions on how to improve this plugin. For more information on plugin milestones, see http://logstash.net/docs/1.4.5/plugin-milestones {:level=>:warn} | |
+---------------------------------------------------------+ | |
| An unexpected error occurred. This is probably a bug. | | |
| You can find help with this problem in a few places: | | |
| | | |
| * chat: #logstash IRC channel on freenode irc. | | |
| IRC via the web: http://goo.gl/TI4Ro | | |
| * email: logstash-users@googlegroups.com | | |
| * bug system: https://logstash.jira.com/ | | |
| | | |
+---------------------------------------------------------+ | |
The error reported is: | |
Invalid argument - Invalid file | |
[root@monitor01 conf.d]# |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment