Here is the setup that is having issues:

- Elasticsearch 6.2
- Logstash 6.8.19
- plugin logstash-input-kinesis 2.0.7
- plugin logstash-output-amazon_es 2.0.1
- plugin logstash-codec-cloudwatch_logs 0.0.2

The same plugins work with Logstash 6.8.16.
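The pipeline configuration is not included in the report; a minimal sketch along these lines matches the setup described above (the Kinesis stream name, application name, and index pattern are placeholders, not the values from this deployment; the endpoint and region are taken from the error log below):

```
input {
  kinesis {
    # placeholder stream/application names; the real deployment uses its own
    kinesis_stream_name => "example-cloudwatch-stream"
    application_name    => "logstash-kinesis-example"
    region              => "us-east-1"
    codec               => cloudwatch_logs
  }
}

output {
  amazon_es {
    hosts  => ["https://vpc-soa-monitoring-ess-nonprd-xxocbmcy5li5hkqn4n3ipup2ra.us-east-1.es.amazonaws.com:443"]
    region => "us-east-1"
    # placeholder index pattern
    index  => "cloudwatch-logs-%{+YYYY.MM.dd}"
  }
}
```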
Logstash Error:
"[2021-10-06T21:59:12,122][ERROR][logstash.outputs.amazones] Attempted to send a bulk request to Elasticsearch configured at '["https://vpc-soa-monitoring-ess-nonprd-xxocbmcy5li5hkqn4n3ipup2ra.us-east-1.es.amazonaws.com:443"]', but an error occurred and it failed! Are you sure you can reach elasticsearch from this machine using the configuration provided? {:client_config=>{:hosts=>["https://vpc-soa-monitoring-ess-nonprd-xxocbmcy5li5hkqn4n3ipup2ra.us-east-1.es.amazonaws.com:443"], :region=>"us-east-1", :transport_options=>{:request=>{:open_timeout=>0, :timeout=>60}, :proxy=>nil, :headers=>{"Content-Type"=>"application/json"}}, :transport_class=>Elasticsearch::Transport::Transport::HTTP::AWS}, :error_message=>"undefined method key?' for nil:NilClass", :error_class=>"NoMethodError", :backtrace=>["/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.0-java/lib/faraday/adapter/manticore.rb:34:in client'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.0-java/lib/faraday/adapter/manticore.rb:65:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-amazon_es-2.0.1-java/lib/logstash/outputs/amazon_es/aws_v4_signer_impl.rb:49:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/faraday-0.9.2/lib/faraday/rack_builder.rb:139:in build_response'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/faraday-0.9.2/lib/faraday/connection.rb:377:in run_request'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-amazon_es-2.0.1-java/lib/logstash/outputs/amazon_es/aws_transport.rb:49:in block in perform_request'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/elasticsearch-transport-5.0.5/lib/elasticsearch/transport/transport/base.rb:262:in perform_request'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-amazon_es-2.0.1-java/lib/logstash/outputs/amazon_es/aws_transport.rb:48:in perform_request'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/elasticsearch-transport-5.0.5/lib/elasticsearch/transport/client.rb:131:in perform_request'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/elasticsearch-api-5.0.5/lib/elasticsearch/api/actions/bulk.rb:95:in bulk'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-amazon_es-2.0.1-java/lib/logstash/outputs/amazon_es/http_client.rb:53:in bulk'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-amazon_es-2.0.1-java/lib/logstash/outputs/amazon_es.rb:321:in block in submit'", "org/jruby/ext/thread/Mutex.java:165:in synchronize'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-amazon_es-2.0.1-java/lib/logstash/outputs/amazon_es.rb:318:in submit'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-amazon_es-2.0.1-java/lib/logstash/outputs/amazon_es.rb:351:in flush'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/stud-0.0.23/lib/stud/buffer.rb:219:in block in buffer_flush'", "org/jruby/RubyHash.java:1419:in each'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/stud-0.0.23/lib/stud/buffer.rb:216:in buffer_flush'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/stud-0.0.23/lib/stud/buffer.rb:159:in buffer_receive'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-amazon_es-2.0.1-java/lib/logstash/outputs/amazon_es.rb:311:in receive'", "/usr/share/logstash/logstash-core/lib/logstash/outputs/base.rb:89:in block in multi_receive'", "org/jruby/RubyArray.java:1792:in each'", "/usr/share/logstash/logstash-core/lib/logstash/outputs/base.rb:89:in 
multi_receive'", "org/logstash/config/ir/compiler/OutputStrategyExt.java:118:in multi_receive'", "org/logstash/config/ir/compiler/AbstractOutputDelegatorExt.java:101:in multi_receive'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:390:in block in output_batch'", "org/jruby/RubyHash.java:1419:in each'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:389:in output_batch'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:341:in worker_loop'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:304:in `block in start_workers'"]}"
Please help!
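Not part of the original report, but since the same plugins work on 6.8.16, one way to narrow this down may be to compare the bundled dependency versions between the two installs (paths taken from the backtrace above):

```sh
# Run on both the working 6.8.16 host and the failing 6.8.19 host, then diff the output.
ls -d /usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-* \
      /usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/faraday-* \
      /usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/elasticsearch-transport-*

# Installed plugin versions, for comparison as well.
/usr/share/logstash/bin/logstash-plugin list --verbose | grep -E 'kinesis|amazon_es|cloudwatch'
```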