Syncing a relational database (DBMS) to Elasticsearch using Logstash

# Configure Logstash

[root@elk74 ~]# cd /etc/logstash/
[root@elk74 logstash]# ll
합계 44
drwxrwxr-x. 2 root root    6  3월 26 17:55 conf.d
-rw-r--r--. 1 root root 2019  3월 26 17:55 jvm.options
-rw-r--r--. 1 root root 8880  3월 26 17:55 log4j2.properties
-rw-r--r--. 1 root root  959  5월  8 11:09 logs.yml
-rw-r--r--. 1 root root  342  3월 26 17:55 logstash-sample.conf
-rw-r--r--. 1 root root 8866  5월  8 08:58 logstash.yml
-rw-r--r--. 1 root root  285  3월 26 17:55 pipelines.yml
-rw-------. 1 root root 1696  3월 26 17:55 startup.options
[root@elk74 logstash]# vi logs.yml
input {
  jdbc {
    jdbc_driver_library => "/root/mysql-connector-java-8.0.16.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://192.168.0.158:3306/es_db"
    # NOTE: string values in a Logstash pipeline config must be quoted;
    # bareword values such as `root` are a parse error.
    jdbc_user => "root"
    jdbc_password => "mypass123"
    jdbc_paging_enabled => true
    tracking_column => "unix_ts_in_secs"
    use_column_value => true
    tracking_column_type => "numeric"
    schedule => "*/5 * * * * *"
    statement => "SELECT *, UNIX_TIMESTAMP(modification_time) AS unix_ts_in_secs FROM es_table WHERE (UNIX_TIMESTAMP(modification_time) > :sql_last_value AND modification_time < NOW()) ORDER BY modification_time ASC"
  }
}
filter {
  mutate {
    copy => { "id" => "[@metadata][_id]" }
    remove_field => ["id", "@version", "unix_ts_in_secs"]
  }
}
output {
  # stdout { codec => "rubydebug" }
  elasticsearch {
    hosts => ["192.168.0.126:9200"]
    index => "rdbms_sync_idx"
    document_id => "%{[@metadata][_id]}"
  }
}

# Start Logstash


[root@elk74 logstash]# /usr/share/logstash/bin/logstash -f logs.yml
[INFO ] 2020-05-08 11:39:15.338 [Ruby-0-Thread-18: :1] jdbc - (0.002761s) SELECT version()
[INFO ] 2020-05-08 11:39:15.351 [Ruby-0-Thread-18: :1] jdbc - (0.008029s) SELECT version()
[INFO ] 2020-05-08 11:39:15.362 [Ruby-0-Thread-18: :1] jdbc - (0.005341s) SELECT count(*) AS `count` FROM (SELECT *, UNIX_TIMESTAMP(modification_time) AS unix_ts_in_secs FROM es_table WHERE (UNIX_TIMESTAMP(modification_time) > 1588905266 AND modification_time < NOW()) ORDER BY modification_time ASC) AS `t1` LIMIT 1

# View the result in Elasticsearch (Kibana Dev Tools)


GET rdbms_sync_idx/_search
{
  "query": {
    "match_all": {}
  }
}
{
  "took" : 1,
  "timed_out" : false,
  "_shards" : {
    "total" : 1,
    "successful" : 1,
    "skipped" : 0,
    "failed" : 0
  },
  "hits" : {
    "total" : {
      "value" : 2,
      "relation" : "eq"
    },
    "max_score" : 1.0,
    "hits" : [
      {
        "_index" : "rdbms_sync_idx",
        "_type" : "_doc",
        "_id" : "1",
        "_score" : 1.0,
        "_source" : {
          "modification_time" : "2020-05-08T02:32:45.000Z",
          "insertion_time" : "2020-05-08T02:32:45.000Z",
          "client_name" : "name1",
          "@timestamp" : "2020-05-08T02:32:50.921Z"
        }
      },
      {
        "_index" : "rdbms_sync_idx",
        "_type" : "_doc",
        "_id" : "2",
        "_score" : 1.0,
        "_source" : {
          "modification_time" : "2020-05-08T02:34:26.000Z",
          "insertion_time" : "2020-05-08T02:34:26.000Z",
          "client_name" : "name2",
          "@timestamp" : "2020-05-08T02:34:30.197Z"
        }
      }
    ]
  }
}

댓글

이 블로그의 인기 게시물

[!] CDN: trunk URL couldn't be downloaded: https://cdn.cocoapods.org/CocoaPods-version.yml Response: URL using bad/illegal format or missing URL

starcraft map

Data Analysis with Superset - boardless chart