Logstash setup to MySQL hosted on AWS RDS. Won't connect - mysql

I have a MySQL DB hosted on AWS RDS. I am running Elasticsearch locally and using Logstash to retrieve data from the MySQL server on AWS and push it to my Elasticsearch DB.
The problem is that my Logstash config file isn't set up correctly, I guess:
input {
  jdbc {
    jdbc_connection_string => "jdbc:mysql://aws.ffffffffff.us-east-1.rds.amazonaws.com:3306/dbName?user=userName&password=pword"
    jdbc_user => "user"
    jdbc_password => "pword"
    schedule => "* * * * *"
    jdbc_validate_connection => true
    jdbc_driver_library => "C:\Program Files (x86)\MySQL\Connector J 8.0\mysql-connector-java-8.0.19.jar"
    jdbc_driver_class => "com.mysql.cj.jdbc.Driver"
    statement => "SELECT * from data-5"
    type => "data-5"
    tags => ["data-5"]
  }
  jdbc {
    jdbc_connection_string => "jdbc:mysql://aws.ffffffffff.us-east-1.rds.amazonaws.com:3306/dbName?user=userName&password=pword"
    jdbc_user => "user"
    jdbc_password => "pword"
    schedule => "* * * * *"
    jdbc_validate_connection => true
    jdbc_driver_library => "C:\Program Files (x86)\MySQL\Connector J 8.0\mysql-connector-java-8.0.19.jar"
    jdbc_driver_class => "com.mysql.cj.jdbc.Driver"
    statement => "SELECT * from data-4"
    type => "data-4"
    tags => ["data-4"]
  }
  jdbc {
    jdbc_connection_string => "jdbc:mysql://aws.ffffffffff.us-east-1.rds.amazonaws.com:3306/dbName?user=userName&password=pword"
    jdbc_user => "user"
    jdbc_password => "pword"
    schedule => "* * * * *"
    jdbc_validate_connection => true
    jdbc_driver_library => "C:\Program Files (x86)\MySQL\Connector J 8.0\mysql-connector-java-8.0.19.jar"
    jdbc_driver_class => "com.mysql.cj.jdbc.Driver"
    statement => "SELECT * from data-3"
    type => "data-3"
    tags => ["data-3"]
  }
}
output {
  stdout { codec => json_lines }
  if "data-5" in [tags] {
    elasticsearch {
      hosts => ["http://127.0.0.1:9200/"]
      index => "data-5"
      document_type => "data-%{+YYYY.MM.dd}"
    }
  }
  if "data-4" in [tags] {
    elasticsearch {
      hosts => ["http://127.0.0.1:9200/"]
      index => "data-4"
      document_type => "data-%{+YYYY.MM.dd}"
    }
  }
  if "data-3" in [tags] {
    elasticsearch {
      hosts => ["http://127.0.0.1:9200/"]
      index => "data-3"
      document_type => "data-%{+YYYY.MM.dd}"
    }
  }
}
This is the fun part of programming, right?
Anyway, locally I am on Windows, as you may be able to tell from the file path to the JDBC driver library. My JDBC connection string for AWS RDS is copied and pasted from the AWS Console, so no typos were involved.
I am told that I only need to prepend jdbc:mysql:// to the URL. But is there anything I'm missing in the AWS Console? Do I need to modify my RDS instance?
The error, by the way, is:
Unable to connect to database. Tried 1 times
{:error_message=>"Java::ComMysqlCjJdbcExceptions::CommunicationsException:
Communications link failure\n\n
The last packet sent successfully to the server was 0 milliseconds ago.
The driver has not received any packets from the server."

I had a similar issue: SSL was not enabled for the communication, so AWS RDS was not allowing the connection.
I added the query parameter useSSL=false to the JDBC connection string, and the problem was solved.
So, in your case, the jdbc_connection_string would be as follows:
"jdbc:mysql://aws.ffffffffff.us-east-1.rds.amazonaws.com:3306/dbName?useSSL=false&user=userName&password=pword"

Related

How do I correctly push data from Logstash to an Elasticsearch server?

I am new to ELK. I need to visualize data from a PostgreSQL database in Kibana. I ran into a little problem and need some help.
I use:
Elasticsearch 6.4.1
Kibana 6.4.1
Logstash 6.4.1
When I run the following logstash.conf file, it doesn't send the correct data to the Elasticsearch server. What do I need to change in my configuration file?
logstash.conf:
input
{
  jbdc_connection_string => "path_to_database"
  jdbc_user => "postgres"
  jdbc_password => "postgres"
  jdbc_driver_library => "/path_to/postgresql-42.2.5.jar"
  jdbc_driver_class => "org.postgresql.Driver"
  statement => "SELECT * from documents"
}
output
{
  elasticsearch
  {
    hosts => ["localhost:9200"]
    index => "documents"
  }
}
Only when I use the following configuration in the output do I see the correct data in the terminal:
strout
{
codes => json_lines
}
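No answer was included here, but a few issues are visible in the posted config itself: the jdbc options are not wrapped in a jdbc { } block, jbdc_connection_string is misspelled, and the working stdout output is written above as strout with codes instead of codec. A minimal sketch of the corrected file, assuming a local PostgreSQL database (the host, port, and database name are placeholders, not taken from the question):
input {
  jdbc {
    # placeholder URL; PostgreSQL JDBC URLs take the form jdbc:postgresql://host:port/database
    jdbc_connection_string => "jdbc:postgresql://localhost:5432/mydb"
    jdbc_user => "postgres"
    jdbc_password => "postgres"
    jdbc_driver_library => "/path_to/postgresql-42.2.5.jar"
    jdbc_driver_class => "org.postgresql.Driver"
    statement => "SELECT * from documents"
  }
}
output {
  stdout { codec => json_lines }
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "documents"
  }
}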

Logstash: not able to connect to MySQL with Logstash?

I am trying to make a connection to MySQL using Logstash and write into Elasticsearch. Below is my code in the conf file:
input {
  jdbc {
    jdbc_connection_string => "jdbc:mysql://192.168.2.24:3306/test"
    # The user we wish to execute our statement as
    jdbc_user => "uname"
    jdbc_password => "pass"
    # The path to our downloaded jdbc driver
    jdbc_driver_library => "/usr/local/Cellar/logstash/6.2.4/mysql-connector-java-8.0.11.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    # our query
    statement => "SELECT * FROM report_table"
  }
}
output {
  elasticsearch {
    action => "index"
    hosts => "localhost:9200"
    index => "mysqlsample"
    document_type => "record"
  }
}
On running the above, I get the below error:
Error: com.mysql.jdbc.Driver not loaded. Are you sure you've included
the correct jdbc driver in :jdbc_driver_library? Exception:
LogStash::ConfigurationError Stack:
/usr/local/Cellar/logstash/6.2.4/libexec/vendor/bundle/jruby/2.3.0/gems/logstash-input-jdbc-4.3.9/lib/logstash/plugin_mixins/jdbc.rb:162:in `open_jdbc_connection'
/usr/local/Cellar/logstash/6.2.4/libexec/vendor/bundle/jruby/2.3.0/gems/logstash-input-jdbc-4.3.9/lib/logstash/plugin_mixins/jdbc.rb:220:in `execute_statement'
/usr/local/Cellar/logstash/6.2.4/libexec/vendor/bundle/jruby/2.3.0/gems/logstash-input-jdbc-4.3.9/lib/logstash/inputs/jdbc.rb:264:in `execute_query'
/usr/local/Cellar/logstash/6.2.4/libexec/vendor/bundle/jruby/2.3.0/gems/logstash-input-jdbc-4.3.9/lib/logstash/inputs/jdbc.rb:250:in `run'
/usr/local/Cellar/logstash/6.2.4/libexec/logstash-core/lib/logstash/pipeline.rb:514:in `inputworker'
/usr/local/Cellar/logstash/6.2.4/libexec/logstash-core/lib/logstash/pipeline.rb:507:in `block in start_input'
Sounds like it's an issue with jdbc_driver_library => "/usr/local/Cellar/logstash/6.2.4/mysql-connector-java-8.0.11.jar".
Are you sure it's a valid path? Is that the correct connector? Maybe try using the one that the documentation mentions: mysql-connector-java-5.1.36-bin.jar.
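Another possibility, since the jar in the question is from the Connector/J 8.0 series: in that series the driver class was renamed from com.mysql.jdbc.Driver to com.mysql.cj.jdbc.Driver, and the "not loaded" error can also appear when the old class name is used with the new jar. A sketch of the input under that assumption, with the path and connection details taken from the question:
jdbc {
  jdbc_connection_string => "jdbc:mysql://192.168.2.24:3306/test"
  jdbc_user => "uname"
  jdbc_password => "pass"
  jdbc_driver_library => "/usr/local/Cellar/logstash/6.2.4/mysql-connector-java-8.0.11.jar"
  # Connector/J 8.x class name; com.mysql.jdbc.Driver is the 5.1.x name
  jdbc_driver_class => "com.mysql.cj.jdbc.Driver"
  statement => "SELECT * FROM report_table"
}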

Migration from MySQL to Elasticsearch using Logstash

I am new to the ELK stack. I am working on data migration from MySQL to Elasticsearch. I am following this tutorial:
https://qbox.io/blog/migrating-mysql-data-into-elasticsearch-using-logstash
I have installed and configured MySQL and Elasticsearch, but I could not configure Logstash.
I don't know where to find logstash.conf, so I created a file named logstash.conf in the conf.d directory of the Logstash folder. I wrote the following in logstash.conf:
input {
  jdbc {
    jdbc_driver_library => "usr/share/java/mysql-connector-java.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://localhost:3306/books"
    jdbc_user => "root"
    jdbc_password => "root"
    statement => "SELECT * FROM authors"
  }
}
output {
  stdout { codec => json_lines }
  elasticsearch {
    "hosts" => "localhost:9200"
    "index" => "my-authors"
    "document_type" => "data"
  }
}
But when I run the command bin/logstash -f logstash.conf after going into the /etc/logstash/conf.d folder in the Ubuntu terminal, it gives an error stating that bin/logstash does not exist.
Please help me with the issue.
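No answer was included here, but the error message itself suggests the shell simply cannot find the executable: with the Elastic apt packages on Ubuntu, the Logstash binary normally lives under /usr/share/logstash, while /etc/logstash only holds configuration. Assuming that standard layout, the command would be run as:
/usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf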

I can't connect to my MySQL database with JDBC in Logstash

I want to input some data from a MySQL database with Logstash.
Here is my jdbc.conf:
input {
  jdbc {
    jdbc_driver_library => "/mysql-connector-java-5.1.40/mysql-connector-java-5.1.40-bin.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://111.11.11.111:3306/dbname"
    jdbc_user => "user"
    jdbc_password => "****"
    statement => "SELECT title from test"
  }
}
output {
  stdout { codec => json }
}
The username, password, host, dbname, and column name are fake, and the output is just for testing.
My database is on the same VPS server.
--configtest passes cleanly. However, I got this error:
/opt/logstash/bin/logstash -f /opt/logstash/bin/config/jdbc.conf
Settings: Default pipeline workers: 4
Pipeline aborted due to error {
:exception=>"LogStash::ConfigurationError",
:backtrace=>[
"/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-input-jdbc-3.1.0/lib/logstash/plugin_mixins/jdbc.rb:159:in `prepare_jdbc_connection'",
"/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-input-jdbc-3.1.0/lib/logstash/inputs/jdbc.rb:187:in `register'",
"/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.4.0-java/lib/logstash/pipeline.rb:330:in `start_inputs'",
"org/jruby/RubyArray.java:1613:in `each'",
"/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.4.0-java/lib/logstash/pipeline.rb:329:in `start_inputs'",
"/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.4.0-java/lib/logstash/pipeline.rb:180:in `start_workers'",
"/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.4.0-java/lib/logstash/pipeline.rb:136:in `run'",
"/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.4.0-java/lib/logstash/agent.rb:491:in `start_pipeline'"],
:level=>:error}
stopping pipeline {:id=>"main"}
I got a LogStash::ConfigurationError. What's wrong with my config?
I finally figured it out.
It was just a bug in the JDBC driver.
While I was hitting the bug, I was using version 5.1.40 downloaded from the MySQL website.
After I changed it to mysql-connector-java-5.1.17 installed via yum, it worked.

using logstash to sync data

I'm trying to use Logstash to sync all the data on my MySQL server to my Elasticsearch server.
I've already learned the basics of logstash.conf; this is my file:
input {
  jdbc {
    jdbc_connection_string => "jdbc:mysql://localhost/homestead"
    jdbc_user => "homestead"
    jdbc_password => "secret"
    jdbc_driver_library => "/home/vagrant/Code/mysql-connector-java-5.1.38-bin.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    statement => "SELECT * from volunteer"
  }
  jdbc {
    jdbc_connection_string => "jdbc:mysql://localhost/homestead"
    jdbc_user => "homestead"
    jdbc_password => "secret"
    jdbc_driver_library => "/home/vagrant/Code/mysql-connector-java-5.1.38-bin.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    statement => "SELECT * from contact"
  }
}
output {
  elasticsearch {
    document_id => "%{uid}"
    hosts => "localhost"
  }
}
My intention is to copy every table into its own type. How do I specify this?
Edit: I mean "type", not "index".
Thank you!
What you can do is simply add a field (using add_field) in each jdbc input, denoting the type name you want the data to be indexed under, and then use that field as the type name in the elasticsearch output.
input {
  jdbc {
    jdbc_connection_string => "jdbc:mysql://localhost/homestead"
    jdbc_user => "homestead"
    jdbc_password => "secret"
    jdbc_driver_library => "/home/vagrant/Code/mysql-connector-java-5.1.38-bin.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    statement => "SELECT * from volunteer"
    add_field => {"type" => "volunteer"}
  }
  jdbc {
    jdbc_connection_string => "jdbc:mysql://localhost/homestead"
    jdbc_user => "homestead"
    jdbc_password => "secret"
    jdbc_driver_library => "/home/vagrant/Code/mysql-connector-java-5.1.38-bin.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    statement => "SELECT * from contact"
    add_field => {"type" => "contact"}
  }
}
output {
  elasticsearch {
    hosts => ["localhost"]
    index => "homestead"
    document_type => "%{type}"     <--- specify the type here
    document_id => "%{uid}"
  }
}
Be aware, though, that using the same index to host several different mapping types might lead to mapping conflicts. The short story is that two different fields with the same name in two different types MUST ALWAYS have the same type definition. Read more about it in this blog article.
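If those mapping conflicts are a concern, an alternative sketch (not part of the original answer) is to reuse the same add_field value in the index name instead, so each table lands in its own index:
output {
  elasticsearch {
    hosts => ["localhost"]
    index => "homestead-%{type}"
    document_id => "%{uid}"
  }
}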