Logstash debug log #1
[2017-06-22T12:27:58,672][DEBUG][logstash.runner ] -------- Logstash Settings (* means modified) ---------
[2017-06-22T12:27:58,674][DEBUG][logstash.runner ] *node.name: "ls5-poc" (default: "logstash-host")
[2017-06-22T12:27:58,675][DEBUG][logstash.runner ] *path.config: "/etc/logstash/conf.d"
[2017-06-22T12:27:58,675][DEBUG][logstash.runner ] path.data: "/usr/share/logstash/data"
[2017-06-22T12:27:58,675][DEBUG][logstash.runner ] config.test_and_exit: false
[2017-06-22T12:27:58,675][DEBUG][logstash.runner ] config.reload.automatic: false
[2017-06-22T12:27:58,675][DEBUG][logstash.runner ] config.reload.interval: 3
[2017-06-22T12:27:58,675][DEBUG][logstash.runner ] metric.collect: true
[2017-06-22T12:27:58,676][DEBUG][logstash.runner ] pipeline.id: "main"
[2017-06-22T12:27:58,676][DEBUG][logstash.runner ] pipeline.system: false
[2017-06-22T12:27:58,676][DEBUG][logstash.runner ] *pipeline.workers: 8 (default: 2)
[2017-06-22T12:27:58,676][DEBUG][logstash.runner ] *pipeline.output.workers: 4 (default: 1)
[2017-06-22T12:27:58,676][DEBUG][logstash.runner ] *pipeline.batch.size: 1200 (default: 125)
[2017-06-22T12:27:58,677][DEBUG][logstash.runner ] pipeline.batch.delay: 5
[2017-06-22T12:27:58,677][DEBUG][logstash.runner ] pipeline.unsafe_shutdown: false
[2017-06-22T12:27:58,677][DEBUG][logstash.runner ] path.plugins: []
[2017-06-22T12:27:58,678][DEBUG][logstash.runner ] *config.debug: true (default: false)
[2017-06-22T12:27:58,678][DEBUG][logstash.runner ] *log.level: "debug" (default: "info")
[2017-06-22T12:27:58,678][DEBUG][logstash.runner ] version: false
[2017-06-22T12:27:58,678][DEBUG][logstash.runner ] help: false
[2017-06-22T12:27:58,679][DEBUG][logstash.runner ] log.format: "plain"
[2017-06-22T12:27:58,679][DEBUG][logstash.runner ] *http.host: "ipaddress" (default: "127.0.0.1")
[2017-06-22T12:27:58,679][DEBUG][logstash.runner ] *http.port: 9600..9600 (default: 9600..9700)
[2017-06-22T12:27:58,679][DEBUG][logstash.runner ] http.environment: "production"
[2017-06-22T12:27:58,679][DEBUG][logstash.runner ] queue.type: "memory"
[2017-06-22T12:27:58,679][DEBUG][logstash.runner ] queue.drain: false
[2017-06-22T12:27:58,680][DEBUG][logstash.runner ] queue.page_capacity: 262144000
[2017-06-22T12:27:58,680][DEBUG][logstash.runner ] queue.max_bytes: 1073741824
[2017-06-22T12:27:58,680][DEBUG][logstash.runner ] queue.max_events: 0
[2017-06-22T12:27:58,681][DEBUG][logstash.runner ] queue.checkpoint.acks: 1024
[2017-06-22T12:27:58,681][DEBUG][logstash.runner ] queue.checkpoint.writes: 1024
[2017-06-22T12:27:58,681][DEBUG][logstash.runner ] queue.checkpoint.interval: 1000
[2017-06-22T12:27:58,681][DEBUG][logstash.runner ] slowlog.threshold.warn: -1
[2017-06-22T12:27:58,681][DEBUG][logstash.runner ] slowlog.threshold.info: -1
[2017-06-22T12:27:58,681][DEBUG][logstash.runner ] slowlog.threshold.debug: -1
[2017-06-22T12:27:58,681][DEBUG][logstash.runner ] slowlog.threshold.trace: -1
[2017-06-22T12:27:58,682][DEBUG][logstash.runner ] path.queue: "/usr/share/logstash/data/queue"
[2017-06-22T12:27:58,682][DEBUG][logstash.runner ] *path.settings: "/etc/logstash" (default: "/usr/share/logstash/config")
[2017-06-22T12:27:58,682][DEBUG][logstash.runner ] *path.logs: "/var/log/logstash" (default: "/usr/share/logstash/logs")
[2017-06-22T12:27:58,682][DEBUG][logstash.runner ] --------------- Logstash Settings -------------------
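(Note on the modified settings above: with *pipeline.workers: 8 and *pipeline.batch.size: 1200, the pipeline can hold up to 8 x 1200 = 9600 events in flight at once, which matches the pipeline.max_inflight=9600 reported when the pipeline starts further down in this log.)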
[2017-06-22T12:27:58,696][DEBUG][logstash.agent ] Agent: Configuring metric collection
[2017-06-22T12:27:58,699][DEBUG][logstash.instrument.periodicpoller.os] PeriodicPoller: Starting {:polling_interval=>5, :polling_timeout=>120}
[2017-06-22T12:27:58,719][DEBUG][logstash.instrument.periodicpoller.jvm] PeriodicPoller: Starting {:polling_interval=>5, :polling_timeout=>120}
[2017-06-22T12:27:58,753][DEBUG][logstash.instrument.periodicpoller.persistentqueue] PeriodicPoller: Starting {:polling_interval=>5, :polling_timeout=>120}
[2017-06-22T12:27:58,764][DEBUG][logstash.agent ] Reading config file {:config_file=>"/etc/logstash/conf.d/mobidiag-input-es-poc-join.conf"}
[2017-06-22T12:27:58,767][DEBUG][logstash.agent ]
The following is the content of a file {:config_file=>"/etc/logstash/conf.d/mobidiag-input-es-poc-join.conf"}
[2017-06-22T12:27:58,768][DEBUG][logstash.agent ]
input {
  jdbc {
    id => "es-poc-input-es-poc"
    connection_retry_attempts => "10"
    connection_retry_attempts_wait_time => "5"
    jdbc_driver_library => "/usr/share/logstash/ojdbc6.jar"
    jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
    jdbc_connection_string => "jdbc-connection-string"
    jdbc_user => "username&password"
    jdbc_password => "username&password"
    jdbc_default_timezone => "America/Denver"
    schedule => "* * * * *"
    statement => "SELECT * FROM table1 NATURAL JOIN table2 WHERE start_ts >= :sql_last_value ORDER BY start_ts"
    last_run_metadata_path => "/usr/share/logstash/.poc_jdbc_last_run"
    clean_run => "false"
    record_last_run => "true"
    sql_log_level => "debug"
    type => "es-poc"
    tracking_column => "start_ts"
    tracking_column_type => "timestamp"
    use_column_value => "true"
  }
}
filter {
  if [start_ts] {
    date {
      id => "StartTSDateFilter"
      match => [ "start_ts", "yyyy-MM-dd'T'HH:mm:ss,SSSZ", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS'Z'", "dd-MMM-yy HH.mm.ss,SSSSSS aa" ]
      target => "@timestamp"
      add_tag => [ "matchedTS" ]
    }
  }
  fingerprint {
    id => "FingerPrintFilter"
    method => "SHA256"
    key => "FUXyAr8RfatAdo2j"
    source => [ "field1", "field2", "field3", "field4", "field5", "field6", "field7", "field8", "field9", "field10", "field11", "start_ts", "end_ts", "field12" ]
    concatenate_sources => "true"
  }
}
output {
  elasticsearch {
    id => "es5-poc-output-02"
    hosts => ["es5-hostname"]
    index => "es-poc-%{+YYYY-MM-dd}"
    document_type => "es-poc"
    document_id => "%{fingerprint}"
    template_name => "es-poc*"
    template => "/usr/share/logstash/es-poc-template.json"
  }
  stdout {
    codec => rubydebug
  }
}
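(For reference, a minimal sketch of what the parameterized statement above resolves to on a first run, when last_run_metadata_path holds no prior value and :sql_last_value falls back to the epoch, rendered in the configured America/Denver zone; this matches the substituted query visible near the end of this log:

  SELECT * FROM table1 NATURAL JOIN table2
  WHERE start_ts >= TIMESTAMP '1969-12-31 17:00:00.000000 -07:00'
  ORDER BY start_ts

On subsequent runs, :sql_last_value is replaced by the largest start_ts seen so far, since use_column_value is enabled with tracking_column "start_ts", persisted in /usr/share/logstash/.poc_jdbc_last_run.)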
[2017-06-22T12:27:58,769][DEBUG][logstash.agent ]
The following is the merged configuration
[2017-06-22T12:27:58,769][DEBUG][logstash.agent ]
input {
  jdbc {
    id => "es-poc-input-es-poc"
    connection_retry_attempts => "10"
    connection_retry_attempts_wait_time => "5"
    jdbc_driver_library => "/usr/share/logstash/ojdbc6.jar"
    jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
    jdbc_connection_string => "jdbc-connection-string"
    jdbc_user => "username&password"
    jdbc_password => "username&password"
    jdbc_default_timezone => "America/Denver"
    schedule => "* * * * *"
    statement => "SELECT * FROM table1 NATURAL JOIN table2 WHERE start_ts >= :sql_last_value ORDER BY start_ts"
    last_run_metadata_path => "/usr/share/logstash/.poc_jdbc_last_run"
    clean_run => "false"
    record_last_run => "true"
    sql_log_level => "debug"
    type => "es-poc"
    tracking_column => "start_ts"
    tracking_column_type => "timestamp"
    use_column_value => "true"
  }
}
filter {
  if [start_ts] {
    date {
      id => "StartTSDateFilter"
      match => [ "start_ts", "yyyy-MM-dd'T'HH:mm:ss,SSSZ", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS'Z'", "dd-MMM-yy HH.mm.ss,SSSSSS aa" ]
      target => "@timestamp"
      add_tag => [ "matchedTS" ]
    }
  }
  fingerprint {
    id => "FingerPrintFilter"
    method => "SHA256"
    key => "FUXyAr8RfatAdo2j"
    source => [ "field1", "field2", "field3", "field4", "field5", "field6", "field7", "field8", "field9", "field10", "field11", "start_ts", "end_ts", "field12" ]
    concatenate_sources => "true"
  }
}
output {
  elasticsearch {
    id => "es5-poc-output-02"
    hosts => ["es5-hostname"]
    index => "es-poc-%{+YYYY-MM-dd}"
    document_type => "es-poc"
    document_id => "%{fingerprint}"
    template_name => "es-poc*"
    template => "/usr/share/logstash/es-poc-template.json"
  }
  stdout {
    codec => rubydebug
  }
}
[2017-06-22T12:27:58,940][DEBUG][logstash.pipeline ] Compiled pipeline code {:code=>" @inputs = []\n @filters = []\n @outputs = []\n @periodic_flushers = []\n @shutdown_flushers = []\n @generated_objects = {}\n\n @generated_objects[:input_jdbc_1] = plugin(\"input\", \"jdbc\", LogStash::Util.hash_merge_many({ \"id\" => (\"es-poc-input-es-poc\") }, { \"connection_retry_attempts\" => (\"10\") }, { \"connection_retry_attempts_wait_time\" => (\"5\") }, { \"jdbc_driver_library\" => (\"/usr/share/logstash/ojdbc6.jar\") }, { \"jdbc_driver_class\" => (\"Java::oracle.jdbc.driver.OracleDriver\") }, { \"jdbc_connection_string\" => (\"jdbc-connection-string\") }, { \"jdbc_user\" => (\"username&password\") }, { \"jdbc_password\" => (\"username&password\") }, { \"jdbc_default_timezone\" => (\"America/Denver\") }, { \"schedule\" => (\"* * * * *\") }, { \"statement\" => (\"SELECT * FROM table1 NATURAL JOIN table2 WHERE start_ts >= :sql_last_value ORDER BY start_ts\") }, { \"last_run_metadata_path\" => (\"/usr/share/logstash/.poc_jdbc_last_run\") }, { \"clean_run\" => (\"false\") }, { \"record_last_run\" => (\"true\") }, { \"sql_log_level\" => (\"debug\") }, { \"type\" => (\"es-poc\") }, { \"tracking_column\" => (\"start_ts\") }, { \"tracking_column_type\" => (\"timestamp\") }, { \"use_column_value\" => (\"true\") }))\n\n @inputs << @generated_objects[:input_jdbc_1]\n\n @generated_objects[:filter_date_2] = plugin(\"filter\", \"date\", LogStash::Util.hash_merge_many({ \"id\" => (\"StartTSDateFilter\") }, { \"match\" => [(\"start_ts\"), (\"yyyy-MM-dd'T'HH:mm:ss,SSSZ\"), (\"ISO8601\"), (\"yyyy-MM-dd'T'HH:mm:ss,SSS'Z'\"), (\"dd-MMM-yy HH.mm.ss,SSSSSS aa\")] }, { \"target\" => (\"@timestamp\") }, { \"add_tag\" => [(\"matchedTS\")] }))\n\n @filters << @generated_objects[:filter_date_2]\n\n @generated_objects[:filter_date_2_flush] = lambda do |options, &block|\n @logger.debug? && @logger.debug(\"Flushing\", :plugin => @generated_objects[:filter_date_2])\n\n events = @generated_objects[:filter_date_2].flush(options)\n\n return if events.nil? || events.empty?\n\n @logger.debug? && @logger.debug(\"Flushing\", :plugin => @generated_objects[:filter_date_2], :events => events.map { |x| x.to_hash })\n\n events = @generated_objects[:filter_fingerprint_3].multi_filter(events)\n \n\n\n events.each{|e| block.call(e)}\n end\n\n if @generated_objects[:filter_date_2].respond_to?(:flush)\n @periodic_flushers << @generated_objects[:filter_date_2_flush] if @generated_objects[:filter_date_2].periodic_flush\n @shutdown_flushers << @generated_objects[:filter_date_2_flush]\n end\n\n @generated_objects[:filter_fingerprint_3] = plugin(\"filter\", \"fingerprint\", LogStash::Util.hash_merge_many({ \"id\" => (\"FingerPrintFilter\") }, { \"method\" => (\"SHA256\") }, { \"key\" => (\"FUXyAr8RfatAdo2j\") }, { \"source\" => [(\"id\"), (\"error_cnt\"), (\"service\"), (\"operation\"), (\"global_id\"), (\"pool\"), (\"server\"), (\"appid\"), (\"appvers\"), (\"tld\"), (\"longerrormessage\"), (\"start_ts\"), (\"end_ts\"), (\"filled\")] }, { \"concatenate_sources\" => (\"true\") }))\n\n @filters << @generated_objects[:filter_fingerprint_3]\n\n @generated_objects[:filter_fingerprint_3_flush] = lambda do |options, &block|\n @logger.debug? && @logger.debug(\"Flushing\", :plugin => @generated_objects[:filter_fingerprint_3])\n\n events = @generated_objects[:filter_fingerprint_3].flush(options)\n\n return if events.nil? || events.empty?\n\n @logger.debug? && @logger.debug(\"Flushing\", :plugin => @generated_objects[:filter_fingerprint_3], :events => events.map { |x| x.to_hash })\n\n \n\n events.each{|e| block.call(e)}\n end\n\n if @generated_objects[:filter_fingerprint_3].respond_to?(:flush)\n @periodic_flushers << @generated_objects[:filter_fingerprint_3_flush] if @generated_objects[:filter_fingerprint_3].periodic_flush\n @shutdown_flushers << @generated_objects[:filter_fingerprint_3_flush]\n end\n\n @generated_objects[:output_elasticsearch_4] = plugin(\"output\", \"elasticsearch\", LogStash::Util.hash_merge_many({ \"id\" => (\"es5-poc-output-02\") }, { \"hosts\" => [(\"es5-hostname\")] }, { \"index\" => (\"es-poc-%{+YYYY-MM-dd}\") }, { \"document_type\" => (\"es-poc\") }, { \"document_id\" => (\"%{fingerprint}\") }, { \"template_name\" => (\"es-poc*\") }, { \"template\" => (\"/usr/share/logstash/es-poc-template.json\") }))\n\n @outputs << @generated_objects[:output_elasticsearch_4]\n\n @generated_objects[:output_stdout_5] = plugin(\"output\", \"stdout\", LogStash::Util.hash_merge_many({ \"codec\" => (\"rubydebug\") }))\n\n @outputs << @generated_objects[:output_stdout_5]\n\n define_singleton_method :filter_func do |event|\n events = [event]\n @logger.debug? && @logger.debug(\"filter received\", \"event\" => event.to_hash)\n events = @generated_objects[:cond_func_1].call(events)\n events = @generated_objects[:filter_fingerprint_3].multi_filter(events)\n \n events\n end\n define_singleton_method :output_func do |event|\n targeted_outputs = []\n @logger.debug? && @logger.debug(\"output received\", \"event\" => event.to_hash)\n targeted_outputs << @generated_objects[:output_elasticsearch_4]\n targeted_outputs << @generated_objects[:output_stdout_5]\n \n targeted_outputs\n end\n @generated_objects[:cond_func_1] = lambda do |input_events|\n result = []\n input_events.each do |event|\n events = [event]\n if ((event.get(\"[start_ts]\"))) # if [start_ts]\n events = @generated_objects[:filter_date_2].multi_filter(events)\n \n\n end\n result += events\n end\n result\n end\n"}
[2017-06-22T12:27:58,956][DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"jdbc", :type=>"input", :class=>LogStash::Inputs::Jdbc}
[2017-06-22T12:27:58,974][DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"plain", :type=>"codec", :class=>LogStash::Codecs::Plain}
[2017-06-22T12:27:58,975][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@id = "plain_04092f46-6cb6-4c37-9555-566b24d98fbd"
[2017-06-22T12:27:58,975][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@enable_metric = true
[2017-06-22T12:27:58,975][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@charset = "UTF-8"
[2017-06-22T12:27:58,978][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@id = "es-poc-input-es-poc"
[2017-06-22T12:27:58,978][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@connection_retry_attempts = 10
[2017-06-22T12:27:58,979][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@connection_retry_attempts_wait_time = 5
[2017-06-22T12:27:58,981][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@jdbc_driver_library = "/usr/share/logstash/ojdbc6.jar"
[2017-06-22T12:27:58,982][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@jdbc_driver_class = "Java::oracle.jdbc.driver.OracleDriver"
[2017-06-22T12:27:58,982][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@jdbc_connection_string = "jdbc-connection-string"
[2017-06-22T12:27:58,982][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@jdbc_user = "username&password"
[2017-06-22T12:27:58,982][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@jdbc_password = <password>
[2017-06-22T12:27:58,982][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@jdbc_default_timezone = "America/Denver"
[2017-06-22T12:27:58,984][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@schedule = "* * * * *"
[2017-06-22T12:27:58,985][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@statement = "SELECT * FROM table1 NATURAL JOIN table2 WHERE start_ts >= :sql_last_value ORDER BY start_ts"
[2017-06-22T12:27:58,985][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@last_run_metadata_path = "/usr/share/logstash/.poc_jdbc_last_run"
[2017-06-22T12:27:58,985][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@clean_run = false
[2017-06-22T12:27:58,985][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@record_last_run = true
[2017-06-22T12:27:58,985][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@sql_log_level = "debug"
[2017-06-22T12:27:58,985][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@type = "es-poc"
[2017-06-22T12:27:58,985][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@tracking_column = "start_ts"
[2017-06-22T12:27:58,985][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@tracking_column_type = "timestamp"
[2017-06-22T12:27:58,987][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@use_column_value = true
[2017-06-22T12:27:58,988][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@enable_metric = true
[2017-06-22T12:27:58,988][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@codec = <LogStash::Codecs::Plain id=>"plain_04092f46-6cb6-4c37-9555-566b24d98fbd", enable_metric=>true, charset=>"UTF-8">
[2017-06-22T12:27:58,988][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@add_field = {}
[2017-06-22T12:27:58,988][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@jdbc_paging_enabled = false
[2017-06-22T12:27:58,988][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@jdbc_page_size = 100000
[2017-06-22T12:27:58,988][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@jdbc_validate_connection = false
[2017-06-22T12:27:58,988][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@jdbc_validation_timeout = 3600
[2017-06-22T12:27:58,988][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@jdbc_pool_timeout = 5
[2017-06-22T12:27:58,989][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@sequel_opts = {}
[2017-06-22T12:27:58,989][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@parameters = {}
[2017-06-22T12:27:58,989][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@lowercase_column_names = true
[2017-06-22T12:27:58,989][DEBUG][logstash.inputs.jdbc ] config LogStash::Inputs::Jdbc/@columns_charset = {}
[2017-06-22T12:27:59,002][DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"date", :type=>"filter", :class=>LogStash::Filters::Date}
[2017-06-22T12:27:59,006][DEBUG][logstash.filters.date ] config LogStash::Filters::Date/@id = "StartTSDateFilter"
[2017-06-22T12:27:59,007][DEBUG][logstash.filters.date ] config LogStash::Filters::Date/@match = ["start_ts", "yyyy-MM-dd'T'HH:mm:ss,SSSZ", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS'Z'", "dd-MMM-yy HH.mm.ss,SSSSSS aa"]
[2017-06-22T12:27:59,007][DEBUG][logstash.filters.date ] config LogStash::Filters::Date/@target = "@timestamp"
[2017-06-22T12:27:59,008][DEBUG][logstash.filters.date ] config LogStash::Filters::Date/@add_tag = ["matchedTS"]
[2017-06-22T12:27:59,008][DEBUG][logstash.filters.date ] config LogStash::Filters::Date/@enable_metric = true
[2017-06-22T12:27:59,008][DEBUG][logstash.filters.date ] config LogStash::Filters::Date/@remove_tag = []
[2017-06-22T12:27:59,010][DEBUG][logstash.filters.date ] config LogStash::Filters::Date/@add_field = {}
[2017-06-22T12:27:59,011][DEBUG][logstash.filters.date ] config LogStash::Filters::Date/@remove_field = []
[2017-06-22T12:27:59,011][DEBUG][logstash.filters.date ] config LogStash::Filters::Date/@periodic_flush = false
[2017-06-22T12:27:59,011][DEBUG][logstash.filters.date ] config LogStash::Filters::Date/@tag_on_failure = ["_dateparsefailure"]
[2017-06-22T12:27:59,036][DEBUG][org.logstash.filters.DateFilter] Date filter with format=yyyy-MM-dd'T'HH:mm:ss,SSSZ, locale=null, timezone=null built as org.logstash.filters.parser.JodaParser
[2017-06-22T12:27:59,119][DEBUG][org.logstash.filters.DateFilter] Date filter with format=ISO8601, locale=null, timezone=null built as org.logstash.filters.parser.CasualISO8601Parser
[2017-06-22T12:27:59,119][DEBUG][org.logstash.filters.DateFilter] Date filter with format=yyyy-MM-dd'T'HH:mm:ss,SSS'Z', locale=null, timezone=null built as org.logstash.filters.parser.JodaParser
[2017-06-22T12:27:59,120][DEBUG][org.logstash.filters.DateFilter] Date filter with format=dd-MMM-yy HH.mm.ss,SSSSSS aa, locale=null, timezone=null built as org.logstash.filters.parser.JodaParser
[2017-06-22T12:27:59,183][DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"fingerprint", :type=>"filter", :class=>LogStash::Filters::Fingerprint}
[2017-06-22T12:27:59,196][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@id = "FingerPrintFilter"
[2017-06-22T12:27:59,196][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@method = "SHA256"
[2017-06-22T12:27:59,197][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@key = "FUXyAr8RfatAdo2j"
[2017-06-22T12:27:59,198][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@source = ["field1", "field2", "field3", "field4", "field5", "field6", "field7", "field8", "field9", "field10", "field11", "start_ts", "end_ts", "field12"]
[2017-06-22T12:27:59,199][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@concatenate_sources = true
[2017-06-22T12:27:59,199][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@enable_metric = true
[2017-06-22T12:27:59,199][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@add_tag = []
[2017-06-22T12:27:59,199][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@remove_tag = []
[2017-06-22T12:27:59,199][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@add_field = {}
[2017-06-22T12:27:59,199][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@remove_field = []
[2017-06-22T12:27:59,199][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@periodic_flush = false
[2017-06-22T12:27:59,199][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@target = "fingerprint"
[2017-06-22T12:27:59,200][DEBUG][logstash.filters.fingerprint] config LogStash::Filters::Fingerprint/@base64encode = false
[2017-06-22T12:27:59,605][DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"elasticsearch", :type=>"output", :class=>LogStash::Outputs::ElasticSearch}
[2017-06-22T12:27:59,625][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@id = "plain_9549e092-1d4e-4faf-ad4c-620ac51d0360"
[2017-06-22T12:27:59,627][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@enable_metric = true
[2017-06-22T12:27:59,628][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@charset = "UTF-8"
[2017-06-22T12:27:59,629][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@id = "es5-poc-output-02"
[2017-06-22T12:27:59,632][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@hosts = [//es5-hostname]
[2017-06-22T12:27:59,632][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@index = "es-poc-%{+YYYY-MM-dd}"
[2017-06-22T12:27:59,632][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@document_type = "es-poc"
[2017-06-22T12:27:59,632][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@document_id = "%{fingerprint}"
[2017-06-22T12:27:59,632][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@template_name = "es-poc*"
[2017-06-22T12:27:59,632][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@template = "/usr/share/logstash/es-poc-template.json"
[2017-06-22T12:27:59,632][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@enable_metric = true
[2017-06-22T12:27:59,632][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@codec = <LogStash::Codecs::Plain id=>"plain_9549e092-1d4e-4faf-ad4c-620ac51d0360", enable_metric=>true, charset=>"UTF-8">
[2017-06-22T12:27:59,633][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@workers = 1
[2017-06-22T12:27:59,633][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@manage_template = true
[2017-06-22T12:27:59,633][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@template_overwrite = false
[2017-06-22T12:27:59,634][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@parent = nil
[2017-06-22T12:27:59,634][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@idle_flush_time = 1
[2017-06-22T12:27:59,635][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@upsert = ""
[2017-06-22T12:27:59,635][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@doc_as_upsert = false
[2017-06-22T12:27:59,635][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script = ""
[2017-06-22T12:27:59,635][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script_type = "inline"
[2017-06-22T12:27:59,635][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script_lang = "painless"
[2017-06-22T12:27:59,636][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script_var_name = "event"
[2017-06-22T12:27:59,637][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@scripted_upsert = false
[2017-06-22T12:27:59,638][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@retry_initial_interval = 2
[2017-06-22T12:27:59,638][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@retry_max_interval = 64
[2017-06-22T12:27:59,638][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@retry_on_conflict = 1
[2017-06-22T12:27:59,638][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@pipeline = nil
[2017-06-22T12:27:59,638][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@action = "index"
[2017-06-22T12:27:59,638][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@ssl_certificate_verification = true
[2017-06-22T12:27:59,638][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@sniffing = false
[2017-06-22T12:27:59,638][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@sniffing_delay = 5
[2017-06-22T12:27:59,639][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@timeout = 60
[2017-06-22T12:27:59,639][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@failure_type_logging_whitelist = []
[2017-06-22T12:27:59,639][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@pool_max = 1000
[2017-06-22T12:27:59,639][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@pool_max_per_route = 100
[2017-06-22T12:27:59,639][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@resurrect_delay = 5
[2017-06-22T12:27:59,639][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@validate_after_inactivity = 10000
[2017-06-22T12:27:59,639][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@http_compression = false
[2017-06-22T12:27:59,643][DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"stdout", :type=>"output", :class=>LogStash::Outputs::Stdout}
[2017-06-22T12:27:59,651][DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"rubydebug", :type=>"codec", :class=>LogStash::Codecs::RubyDebug}
[2017-06-22T12:27:59,653][DEBUG][logstash.codecs.rubydebug] config LogStash::Codecs::RubyDebug/@id = "rubydebug_82b6cc7a-486f-425b-ad32-e0f0af872dc9"
[2017-06-22T12:27:59,653][DEBUG][logstash.codecs.rubydebug] config LogStash::Codecs::RubyDebug/@enable_metric = true
[2017-06-22T12:27:59,655][DEBUG][logstash.codecs.rubydebug] config LogStash::Codecs::RubyDebug/@metadata = false
[2017-06-22T12:27:59,735][DEBUG][logstash.outputs.stdout ] config LogStash::Outputs::Stdout/@codec = <LogStash::Codecs::RubyDebug id=>"rubydebug_82b6cc7a-486f-425b-ad32-e0f0af872dc9", enable_metric=>true, metadata=>false>
[2017-06-22T12:27:59,736][DEBUG][logstash.outputs.stdout ] config LogStash::Outputs::Stdout/@id = "bebbaeff1c4a384d4d225e43cd39f7ce71060fc4-5"
[2017-06-22T12:27:59,736][DEBUG][logstash.outputs.stdout ] config LogStash::Outputs::Stdout/@enable_metric = true
[2017-06-22T12:27:59,736][DEBUG][logstash.outputs.stdout ] config LogStash::Outputs::Stdout/@workers = 1
[2017-06-22T12:27:59,744][DEBUG][logstash.agent ] starting agent
[2017-06-22T12:27:59,745][DEBUG][logstash.agent ] starting pipeline {:id=>"main"}
[2017-06-22T12:27:59,749][DEBUG][logstash.outputs.elasticsearch] Normalizing http path {:path=>nil, :normalized=>nil}
[2017-06-22T12:28:00,055][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://es5-poc-lb-xz6p1.vip.stratus.phx.ebay.com/]}}
[2017-06-22T12:28:00,056][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://es5-poc-lb-xz6p1.vip.stratus.phx.ebay.com/, :path=>"/"}
[2017-06-22T12:28:00,150][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>#<URI::HTTP:0xae4f0f URL:http://es5-poc-lb-xz6p1.vip.stratus.phx.ebay.com/>}
[2017-06-22T12:28:00,153][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>"/usr/share/logstash/es-poc-template.json"}
[2017-06-22T12:28:00,195][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"es-poc*", "version"=>1005, "settings"=>{"index.refresh_interval"=>"5s", "number_of_shards"=>"3"}, "mappings"=>{"poc"=>{"properties"=>{"@timestamp"=>{"type"=>"date", "format"=>"dateOptionalTime"}, "field1"=>{"index"=>"not_analyzed", "type"=>"string"}, "field2"=>{"index"=>"not_analyzed", "type"=>"string"}, "field3"=>{"index"=>"not_analyzed", "type"=>"long"}, "end_ts"=>{"type"=>"date", "format"=>"yyyy-MM-dd'T'HH:mm:ss.SSSZ"}, "field4"=>{"index"=>"not_analyzed", "type"=>"long"}, "field5"=>{"index"=>"not_analyzed", "type"=>"string"}, "field6"=>{"index"=>"not_analyzed", "type"=>"string"}, "field7"=>{"index"=>"not_analyzed", "type"=>"string"}, "field8"=>{"index"=>"not_analyzed", "type"=>"string"}, "field9"=>{"index"=>"not_analyzed", "type"=>"string"}, "field10"=>{"index"=>"not_analyzed", "type"=>"string"}, "start_ts"=>{"type"=>"date", "format"=>"yyyy-MM-dd'T'HH:mm:ss.SSSZ"}, "field11"=>{"index"=>"not_analyzed", "type"=>"string"}}}}}}
[2017-06-22T12:28:00,201][DEBUG][logstash.outputs.elasticsearch] Found existing Elasticsearch template. Skipping template management {:name=>"es-poc*"}
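(A note for later template edits: the install above is skipped because a template named "es-poc*" already exists and @template_overwrite is false, as logged earlier, so changes to /usr/share/logstash/es-poc-template.json would not be pushed on restart. A hedged sketch of the output setting that forces a push, assuming overwriting the existing template is acceptable:

  elasticsearch {
    ...
    template_overwrite => true
  }
)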
[2017-06-22T12:28:00,203][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>[#<URI::Generic:0x6e3d96de URL://es5-hostname>]}
[2017-06-22T12:28:00,209][INFO ][logstash.pipeline ] Starting pipeline {"id"=>"main", "pipeline.workers"=>8, "pipeline.batch.size"=>1200, "pipeline.batch.delay"=>5, "pipeline.max_inflight"=>9600}
[2017-06-22T12:28:00,407][INFO ][logstash.pipeline ] Pipeline main started
[2017-06-22T12:28:00,425][DEBUG][logstash.agent ] Starting puma
[2017-06-22T12:28:00,431][DEBUG][logstash.agent ] Trying to start WebServer {:port=>9600}
[2017-06-22T12:28:00,432][DEBUG][logstash.api.service ] [api-service] start
[2017-06-22T12:28:00,487][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2017-06-22T12:28:05,406][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:28:10,407][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:28:15,410][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:28:20,410][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:28:25,412][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:28:30,415][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:28:35,416][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:28:40,417][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:28:45,418][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:28:50,418][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:28:55,418][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:29:00,420][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:29:05,421][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:29:09,675][DEBUG][logstash.inputs.jdbc ] (8.804000s) SELECT * FROM (SELECT count(*) "COUNT" FROM (SELECT * FROM table1 NATURAL JOIN table2 WHERE start_ts >= TIMESTAMP '1969-12-31 17:00:00.000000 -07:00' ORDER BY start_ts) "T1") "T1" WHERE (ROWNUM <= 1)
[2017-06-22T12:29:09,681][DEBUG][logstash.inputs.jdbc ] Executing JDBC query {:statement=>"SELECT * FROM table1 NATURAL JOIN table2 WHERE start_ts >= :sql_last_value ORDER BY start_ts", :parameters=>{:sql_last_value=>1970-01-01 00:00:00 UTC}, :count=>3576251}
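(Worth noting about the query above: @jdbc_paging_enabled is false, the plugin default logged earlier, so after the 8.8 s count query all 3,576,251 matching rows are fetched as a single result set, during which the pipeline only logs its periodic flushes. If that single fetch turns out to be the bottleneck, a hedged sketch of the relevant input settings; the page size shown is the plugin default from earlier in this log, illustrative rather than tuned:

  jdbc {
    ...
    jdbc_paging_enabled => true
    jdbc_page_size => 100000
  }
)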
[2017-06-22T12:29:10,421][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:29:15,421][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-06-22T12:29:20,420][DEBUG][logstash.pipeline ] Pushing flush onto pipeline