elasticsearch - Linux ELK server reading CloudTrail logs - troubleshooting issues
I have built an ELK server running on RHEL 7.2 in AWS. The plan is to ingest CloudTrail logs from an S3 bucket and have the magic happen at the Kibana front end. It's not working, and I've lost days of my life trying to work it out, hence why I'm reaching out for help. I can see from logstash.log that it is reading the files in the S3 bucket, but that's it; nothing else appears to be happening.
My setup:

RHEL 7.2
kibana-4.5.0-1.x86_64
logstash-2.3.2-1.noarch
elasticsearch-2.3.3-1.noarch
nginx-1.6.3-8.el7.x86_64 (reverse proxy for Kibana on port 80)

/etc/logstash/conf.d looks like this:

-rw-r--r-- 1 root root 574 May 31 14:55 02-cloudtrail-input.conf
-rw-r--r-- 1 root root 432 May 31 15:04 30-elasticsearch-output.conf
[root@elk conf.d]# cat *
input {
  s3 {
    bucket => "xyz..cloudtrail"
    access_key_id => 'xyz'
    secret_access_key => 'abc'
    delete => false
    codec => "cloudtrail"
    prefix => "cloudtrail/awslogs/xxxxx/cloudtrail/ap-southeast-2/2016/"
    type => "cloudtrail"
    interval => 10 # seconds
    region => "ap-southeast-2"
    sincedb_path => "/data/logstash/cloudtrail/db/sincedb"
  }
}

output {
  #stdout {}
  stdout { codec => rubydebug }
  elasticsearch {
    hosts => "localhost:9200"
    sniffing => true
    #codec => "cloudtrail"
    #index => "cloudtrail"
    index => "logstash-%{+YYYY.MM.dd}"
    #index => "%{[@metadata][cloudtrail]}-%{+YYYY.MM.dd}"
    #index => "cloudtrail-%{+YYYY.MM.dd}"
    action => create
    manage_template => false
    workers => 4
  }
}

Installed plugins:

logstash-codec-cloudtrail
logstash-input-s3
logstash-output-s3
I don't know how the index should be defined in the Logstash output so that I can search it on the Kibana front end, and when Kibana asks me to select one of the three time-frame options I don't know what that means. Should I have sniffing => true? Should I have action => create defined? Should the cloudtrail codec be defined in both the Logstash input and the output?
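To make that concrete: would a bare-bones output along these lines be enough, or do I really need action, manage_template, sniffing and the codec on the output side? (This is only an illustration of what I mean, not something I have confirmed works.)

output {
  elasticsearch {
    hosts => "localhost:9200"
    index => "logstash-%{+YYYY.MM.dd}"   # conventional daily Logstash index name
  }
  stdout { codec => rubydebug }          # keep console output while debugging
}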
Can anyone tell me what it means when it says "plugin not defined in namespace, checking plugin file"? It sounds to me like it can't find a plugin, even though the required plugins are installed (I think).
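For what it's worth, this is roughly how I've been checking that the plugins are installed (assuming the RPM install lives under /opt/logstash and that bin/logstash-plugin is the right plugin manager on Logstash 2.3):

[root@elk ~]# /opt/logstash/bin/logstash-plugin list | grep -E 'cloudtrail|s3'
logstash-codec-cloudtrail
logstash-input-s3
logstash-output-s3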
Here is the output from when I start Logstash manually... it doesn't mean much to me:
Reading config file {:config_file=>"/etc/logstash/conf.d/02-cloudtrail-input.conf", :level=>:debug, :file=>"logstash/config/loader.rb", :line=>"69", :method=>"local_config"}
Reading config file {:config_file=>"/etc/logstash/conf.d/30-elasticsearch-output.conf", :level=>:debug, :file=>"logstash/config/loader.rb", :line=>"69", :method=>"local_config"}
Plugin not defined in namespace, checking plugin file {:type=>"input", :name=>"s3", :path=>"logstash/inputs/s3", :level=>:debug, :file=>"logstash/plugin.rb", :line=>"76", :method=>"lookup"}
Plugin not defined in namespace, checking plugin file {:type=>"codec", :name=>"cloudtrail", :path=>"logstash/codecs/cloudtrail", :level=>:debug, :file=>"logstash/plugin.rb", :line=>"76", :method=>"lookup"}
config LogStash::Codecs::CloudTrail/@spool_size = 50 {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@bucket = "abcdbase-cloudtrail" {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@access_key_id = "xyz" {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@secret_access_key = "abc" {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@delete = false {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@codec = <LogStash::Codecs::CloudTrail spool_size=>50> {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@prefix = "abcdbase-trail/awslogs/554658506446/cloudtrail/ap-southeast-2/2016/" {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@type = "cloudtrail" {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@interval = 10 {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@region = "ap-southeast-2" {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@sincedb_path = "/data/logstash/cloudtrail/db/sincedb" {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@add_field = {} {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@use_ssl = true {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@credentials = [] {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@backup_to_bucket = nil {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@backup_add_prefix = nil {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@backup_to_dir = nil {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@exclude_pattern = nil {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
config LogStash::Inputs::S3/@temporary_directory = "/tmp/logstash" {:level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"153", :method=>"config_init"}
Plugin not defined in namespace, checking plugin file {:type=>"output", :name=>"stdout", :path=>"logstash/outputs/stdout", :level=>:debug, :file=>"logstash/plugin.rb", :line=>"76", :method=>"lookup"}
Plugin not defined in namespace, checking plugin file {:type=>"output", :name=>"elasticsearch", :path=>"logstash/outputs/elasticsearch", :level=>:debug, :file=>"logstash/plugin.rb", :line=>"76", :method=>"lookup"}
What does it mean when logstash.log keeps saying method=>"list_new_files"? Does that mean it's busy re-reading the whole S3 bucket (every time Logstash is stopped/started)?
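Related to that: is checking the index list the right way to confirm whether any events are reaching Elasticsearch at all? I've been assuming something like this would show a logstash-* index if ingestion were working:

[root@elk ~]# curl 'localhost:9200/_cat/indices?v'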
I know that's a lot of questions, but I thought I'd try here before throwing the whole setup in the bin.
Thanks
I can't answer these questions directly, but I know there are services that support shipping logs from S3 out of the box. It might be worth checking those out.