# encoding: utf-8
require "logstash/outputs/base"
require "logstash/namespace"
require "stud/buffer"
require "java"
require "logstash-output-jdbc_jars"
require "logstash-output-jdbc_ring-buffer"

class LogStash::Outputs::Jdbc < LogStash::Outputs::Base
  # Adds buffer support
  include Stud::Buffer

  config_name "jdbc"

  # Driver class - Reintroduced for https://github.com/theangryangel/logstash-output-jdbc/issues/26
  config :driver_class, :validate => :string

  # Does the JDBC driver support autocommit?
  config :driver_auto_commit, :validate => :boolean, :default => true, :required => true

  # Where to find the driver jar.
  # Not required; if unset, the plugin falls back to the original behaviour of
  # loading jars from the vendor directory.
  config :driver_jar_path, :validate => :string, :required => false
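
  # Illustrative example only (the jar path, driver class and connection string
  # below are placeholders, not values shipped with this plugin):
  #
  #   output {
  #     jdbc {
  #       driver_jar_path => "/opt/jdbc/mysql-connector-java.jar"
  #       driver_class => "com.mysql.jdbc.Driver"
  #       connection_string => "jdbc:mysql://localhost/mydb?user=me&password=secret"
  #     }
  #   }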

  # JDBC connection string
  config :connection_string, :validate => :string, :required => true

  # JDBC username - optional, may be included in the connection string instead
  config :username, :validate => :string, :required => false

  # JDBC password - optional, may be included in the connection string instead
  config :password, :validate => :string, :required => false

  # [ "insert into table (message) values(?)", "%{message}" ]
  config :statement, :validate => :array, :required => true
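
  # Illustrative example only (table and field names are placeholders): the
  # first array element is the prepared statement and the remaining elements
  # are event fields bound, in order, to each "?" placeholder:
  #
  #   statement => [ "INSERT INTO log (host, message) VALUES(?, ?)", "%{host}", "%{message}" ]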

  # If this is an unsafe statement, use event.sprintf.
  # This also carries a potential performance penalty, because a new statement
  # has to be created for each event rather than adding to a batch and issuing
  # multiple inserts in one go.
  config :unsafe_statement, :validate => :boolean, :default => false
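
  # A sketch of the unsafe form (table and field names are placeholders); the
  # whole statement is passed through event.sprintf instead of being bound as
  # prepared-statement parameters:
  #
  #   unsafe_statement => true
  #   statement => [ "INSERT INTO log (message) VALUES('%{message}')" ]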

  # Number of connections in the pool to maintain
  config :max_pool_size, :validate => :number, :default => 5

  # Connection timeout, in milliseconds
  config :connection_timeout, :validate => :number, :default => 10000

  # We buffer a certain number of events before flushing them out to SQL.
  # This setting controls how many events will be buffered before sending a
  # batch of events.
  config :flush_size, :validate => :number, :default => 1000

  # The amount of time since the last flush before a flush is forced.
  #
  # This setting helps ensure slow event rates don't get stuck in Logstash.
  # For example, if your `flush_size` is 100, and you have received 10 events,
  # and it has been more than `idle_flush_time` seconds since the last flush,
  # Logstash will flush those 10 events automatically.
  #
  # This helps keep both fast and slow log streams moving along in
  # a timely manner.
  #
  # If you change this value please ensure that you change
  # max_flush_exceptions accordingly.
  config :idle_flush_time, :validate => :number, :default => 1
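
  # Illustrative only: with flush_size => 100 and idle_flush_time => 5, a batch
  # is sent as soon as 100 events have been buffered, or after 5 seconds of
  # inactivity, whichever comes first.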

  # Maximum number of sequential flushes which encounter exceptions before we stop retrying.
  # If set to < 1, it will retry indefinitely.
  #
  # You should carefully tune this in relation to idle_flush_time if your SQL server
  # is not highly available.
  # i.e. if your idle_flush_time is 1, your max_flush_exceptions is 200, and your SQL server takes
  # longer than 200 seconds to reboot, then Logstash will stop.
  config :max_flush_exceptions, :validate => :number, :default => 0

  config :max_repeat_exceptions, :obsolete => "This has been replaced by max_flush_exceptions - which behaves slightly differently. Please check the documentation."
  config :max_repeat_exceptions_time, :obsolete => "This is no longer required"

  public
  def register
    @logger.info("JDBC - Starting up")

    load_jar_files!

    @exceptions_tracker = RingBuffer.new(@max_flush_exceptions)

    if @flush_size > 1000
      @logger.warn("JDBC - Flush size is set to > 1000")
    end

    if @statement.length < 1
      @logger.error("JDBC - No statement provided. Configuration error.")
    end

    if !@unsafe_statement and @statement.length < 2
      @logger.error("JDBC - Statement has no parameters. No events will be inserted into SQL as you're not passing any event data. Likely configuration error.")
    end

    setup_and_test_pool!

    buffer_initialize(
      :max_items => @flush_size,
      :max_interval => @idle_flush_time,
      :logger => @logger
    )
  end

  def receive(event)
    return unless output?(event) or event.cancelled?
    return unless @statement.length > 0
    buffer_receive(event)
  end

  def flush(events, teardown = false)
    if @unsafe_statement
      unsafe_flush(events, teardown)
    else
      safe_flush(events, teardown)
    end
  end

  def on_flush_error(e)
    return if @max_flush_exceptions < 1

    @exceptions_tracker << e.class

    if @exceptions_tracker.reject { |i| i.nil? }.count >= @max_flush_exceptions
      @logger.error("JDBC - max_flush_exceptions has been reached")
      log_jdbc_exception(e)
      raise LogStash::ShutdownSignal.new
    end
  end

  def teardown
    buffer_flush(:final => true)
    @pool.close()
    super
  end

  private

  def setup_and_test_pool!
    # Setup pool
    @pool = Java::ComZaxxerHikari::HikariDataSource.new
    @pool.setAutoCommit(@driver_auto_commit)
    @pool.setDriverClassName(@driver_class) if @driver_class
    @pool.setJdbcUrl(@connection_string)
    @pool.setUsername(@username) if @username
    @pool.setPassword(@password) if @password
    @pool.setMaximumPoolSize(@max_pool_size)
    @pool.setConnectionTimeout(@connection_timeout)

    # isValid expects seconds; @connection_timeout is in milliseconds
    validate_connection_timeout = (@connection_timeout / 1000) / 2

    # Test connection
    test_connection = @pool.getConnection()
    unless test_connection.isValid(validate_connection_timeout)
      @logger.error("JDBC - Connection is not valid. Please check the connection string, or that your JDBC endpoint is available.")
    end
    test_connection.close()
  end

  def load_jar_files!
    # Load jar from driver path
    unless @driver_jar_path.nil?
      raise Exception.new("JDBC - Could not find jar file at given path. Check config.") unless File.exists? @driver_jar_path
      require @driver_jar_path
      return
    end

    # Revert to the original behaviour of loading from the vendor directory
    # if no path is given
    if ENV['LOGSTASH_HOME']
      jarpath = File.join(ENV['LOGSTASH_HOME'], "/vendor/jar/jdbc/*.jar")
    else
      jarpath = File.join(File.dirname(__FILE__), "../../../vendor/jar/jdbc/*.jar")
    end

    @logger.debug("JDBC - jarpath", path: jarpath)

    jars = Dir[jarpath]
    raise Exception.new("JDBC - No jars found in jarpath. Have you read the README?") if jars.empty?

    jars.each do |jar|
      @logger.debug("JDBC - Loaded jar", :jar => jar)
      require jar
    end
  end

  def safe_flush(events, teardown = false)
    connection = nil
    statement = nil
    begin
      connection = @pool.getConnection()
      statement = connection.prepareStatement(@statement[0])

      events.each do |event|
        next if event.cancelled?
        next if @statement.length < 2
        statement = add_statement_event_params(statement, event)
        statement.addBatch()
      end

      statement.executeBatch()
      statement.close()
      @exceptions_tracker << nil
    rescue => e
      log_jdbc_exception(e)
    ensure
      statement.close() unless statement.nil?
      connection.close() unless connection.nil?
    end
  end

  def unsafe_flush(events, teardown = false)
    connection = nil
    statement = nil
    begin
      connection = @pool.getConnection()

      events.each do |event|
        next if event.cancelled?

        statement = connection.prepareStatement(event.sprintf(@statement[0]))
        statement = add_statement_event_params(statement, event) if @statement.length > 1

        statement.execute()
        # close each per-event statement so statements don't leak across the batch
        statement.close()

        # cancel the event, since we may end up outputting the same event multiple times
        # if an exception happens later down the line
        event.cancel
        @exceptions_tracker << nil
      end
    rescue => e
      log_jdbc_exception(e)
    ensure
      statement.close() unless statement.nil?
      connection.close() unless connection.nil?
    end
  end

  def add_statement_event_params(statement, event)
    @statement[1..-1].each_with_index do |i, idx|
      case event[i]
      when Time
        # Most reliable solution, cross JDBC driver
        statement.setString(idx + 1, event[i].iso8601())
      when LogStash::Timestamp
        # Most reliable solution, cross JDBC driver.
        # LogStash::Timestamp does not implement iso8601, so use to_iso8601.
        statement.setString(idx + 1, event[i].to_iso8601())
      when Fixnum, Integer
        statement.setInt(idx + 1, event[i])
      when Float
        statement.setFloat(idx + 1, event[i])
      when String
        statement.setString(idx + 1, event[i])
      when true
        statement.setBoolean(idx + 1, true)
      when false
        statement.setBoolean(idx + 1, false)
      else
        if event[i].nil? and i =~ /%\{/
          statement.setString(idx + 1, event.sprintf(i))
        else
          statement.setString(idx + 1, nil)
        end
      end
    end

    statement
  end

  def log_jdbc_exception(e)
    ce = e
    loop do
      @logger.error("JDBC Exception encountered: Will automatically retry.", :exception => ce)
      # Walk the chain of SQLExceptions rather than re-reading the first one forever;
      # non-SQL exceptions have no chain, so stop after logging them once.
      ce = ce.respond_to?(:getNextException) ? ce.getNextException() : nil
      break if ce.nil?
    end
  end
end # class LogStash::Outputs::Jdbc