I want to load MySQL table data into Elasticsearch (to view in Kibana) using Logstash with the JDBC input plugin.
The MySQL database username is "root" and the password is blank. I tried setting the password to "", " ", and "Null" (the exact lines I tried are shown right after the config below), but none of them work.
This is my Logstash configuration file:
input {
  jdbc {
    jdbc_driver_library => "C:/elasticsearch-7.3.0/driver/com.mysql.jdbc_5.1.5.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://localhost:3306/dbname?useSSL=false"
    jdbc_user => "root"
    jdbc_password => " "
    statement => "SELECT * FROM table"
  }
}

output {
  stdout { codec => rubydebug }
  elasticsearch {
    hosts => ["localhost"]
    index => "index_name"
  }
}
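
These are the jdbc_password variants I have tried so far for the blank root password (each line is literally what I put in the jdbc block; I don't know which, if any, is the correct way to express an empty password):

    jdbc_password => ""      # empty string
    jdbc_password => " "     # single space
    jdbc_password => "Null"  # the literal string "Null"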
Logstash output:
[2019-11-06T13:02:28,143][ERROR][logstash.inputs.jdbc ] Failed to load C:/elasticsearch-7.3.0/driver/com.mysql.jdbc_5.1.5.jar {:exception=>#}
[2019-11-06T13:02:28,146][ERROR][logstash.javapipeline ] A plugin had an unrecoverable error. Will restart this plugin.
Pipeline_id:main
Plugin: "root", jdbc_password=>, statement=>"SELECT * FROM tracker", jdbc_driver_library=>"C:/elasticsearch-7.3.0/driver/com.mysql.jdbc_5.1.5.jar", jdbc_connection_string=>"jdbc:mysql://localhost:3306/pvtrace?useSSL=false", id=>"5eccb173adcbec4cd0c68701c4737d83e11f82fdc157788bc9b76507e2a70a06", jdbc_driver_class=>"com.mysql.jdbc.Driver", enable_metric=>true, codec=>"plain_feefd4f8-c2ca-4050-8044-04f466e0c157", enable_metric=>true, charset=>"UTF-8">, jdbc_paging_enabled=>false, jdbc_page_size=>100000, jdbc_validate_connection=>false, jdbc_validation_timeout=>3600, jdbc_pool_timeout=>5, sql_log_level=>"info", connection_retry_attempts=>1, connection_retry_attempts_wait_time=>0.5, parameters=>{"sql_last_value"=>1970-01-01 00:00:00 UTC}, last_run_metadata_path=>"C:\Users\himanshika.yeduvans/.logstash_jdbc_last_run", use_column_value=>false, tracking_column_type=>"numeric", clean_run=>false, record_last_run=>true, lowercase_column_names=>true>
Error: com.mysql.jdbc.Driver not loaded. Are you sure you've included the correct jdbc driver in :jdbc_driver_library?
Exception: LogStash::ConfigurationError
Stack: C:/logstash-7.3.0/vendor/bundle/jruby/2.5.0/gems/logstash-input-jdbc-4.3.13/lib/logstash/plugin_mixins/jdbc/jdbc.rb:163:in `open_jdbc_connection'
C:/logstash-7.3.0/vendor/bundle/jruby/2.5.0/gems/logstash-input-jdbc-4.3.13/lib/logstash/plugin_mixins/jdbc/jdbc.rb:221:in `execute_statement'
C:/logstash-7.3.0/vendor/bundle/jruby/2.5.0/gems/logstash-input-jdbc-4.3.13/lib/logstash/inputs/jdbc.rb:277:in `execute_query'
C:/logstash-7.3.0/vendor/bundle/jruby/2.5.0/gems/logstash-input-jdbc-4.3.13/lib/logstash/inputs/jdbc.rb:263:in `run'
C:/logstash-7.3.0/logstash-core/lib/logstash/java_pipeline.rb:309:in `inputworker'
C:/logstash-7.3.0/logstash-core/lib/logstash/java_pipeline.rb:302:in `block in start_input'
[2019-11-06T13:03:31,349][WARN ][logstash.runner ] SIGINT received. Shutting down.
[2019-11-06T13:03:32,070][ERROR][logstash.inputs.jdbc ] Failed to load C:/elasticsearch-7.3.0/driver/com.mysql.jdbc_5.1.5.jar {:exception=>#}
[2019-11-06T13:03:36,354][WARN ][logstash.runner ] Received shutdown signal, but pipeline is still waiting for in-flight events to be processed. Sending another ^C will force quit Logstash, but this may cause data loss.
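
Since the error says com.mysql.jdbc.Driver could not be loaded from the jar, I suspect the jar file itself (com.mysql.jdbc_5.1.5.jar) might be the problem rather than the password. Is the input block supposed to point at the official MySQL Connector/J jar instead, roughly like the sketch below? The jar file name and path here are just my guess, not what I currently have installed:

    input {
      jdbc {
        # Assumed path and file name for the MySQL Connector/J 5.1 driver jar
        jdbc_driver_library => "C:/path/to/mysql-connector-java-5.1.47.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        jdbc_connection_string => "jdbc:mysql://localhost:3306/dbname?useSSL=false"
        jdbc_user => "root"
        jdbc_password => ""
        statement => "SELECT * FROM table"
      }
    }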