Fixes up README and adds tested support for buffering

This commit is contained in:
Karl Southern 2014-06-01 13:16:33 +01:00
parent f58b55f8ed
commit 093b2bbb92
2 changed files with 45 additions and 22 deletions

View File

@ -1,26 +1,55 @@
logstash-jdbc # logstash-jdbc
=============
JDBC output plugin for Logstash. JDBC output plugin for Logstash.
This plugin is provided as an external plugin and is not part of the Logstash project. This plugin is provided as an external plugin and is not part of the Logstash project.
Warning ## Warning
-------
This has not yet been extensively tested with all JDBC drivers and may not yet work for you. This has not yet been extensively tested with all JDBC drivers and may not yet work for you.
Installation ## Installation
------------
- Copy lib directory contents into your logstash installation. - Copy lib directory contents into your logstash installation.
- Create the directory vendor/jar/jdbc in your logstash installation (`mkdir -p vendor/jar/jdbc/`)
- Add JDBC jar files to vendor/jar/jdbc in your logstash installation - Add JDBC jar files to vendor/jar/jdbc in your logstash installation
- Configure - Configure
Example configuration ## Configuration options
--------------------- * driver_class, string, JDBC driver class to load
* connection_string, string, JDBC connection string
* statement, array, an array of strings representing the SQL statement to run. Index 0 is the SQL statement that is prepared, all other array entries are passed in as parameters (in order). See example configurations below.
* flush_size, number, default = 1000, number of entries to buffer before sending to SQL
* idle_flush_time, number, default = 1, number of idle seconds before sending data to SQL, even if the flush_size has not been reached
## Example configurations
### SQLite3
* Tested using https://bitbucket.org/xerial/sqlite-jdbc
* SQLite setup - `echo "CREATE table log (host text, timestamp datetime, message text);" | sqlite3 test.db`
``` ```
input
{
stdin { }
}
output {
stdout { }
jdbc {
driver_class => 'org.sqlite.JDBC'
connection_string => 'jdbc:sqlite:test.db'
statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, ?, ?)", "%{host}", "%{@timestamp}", "%{message}" ]
}
}
```
### SQL Server
* Tested using http://msdn.microsoft.com/en-gb/sqlserver/aa937724.aspx
```
input
{
stdin { }
}
output { output {
jdbc { jdbc {
driver_class => 'com.microsoft.sqlserver.jdbc.SQLServerDriver' driver_class => 'com.microsoft.sqlserver.jdbc.SQLServerDriver'
connection_string => "jdbc:sqlserver://server:1433;databaseName=databasename;user=username;password=password;autoReconnect=true;" connection_string => "jdbc:sqlserver://server:1433;databaseName=databasename;user=username;password=password;autoReconnect=true;"
statement => [ "INSERT INTO filezilla (host, connection_id, timestamp, username, client, command) VALUES(?, ?, ?, ?, ?, ?)", "%{host}", "%{connection_id}", "%{timestamp}", "%{username}", "%{client}", "%{command}" ] statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, ?, ?)", "%{host}", "%{@timestamp}", "%{message}" ]
} }
} }
``` ```

View File

@ -19,11 +19,9 @@ class LogStash::Outputs::Jdbc < LogStash::Outputs::Base
# [ "insert into table (message) values(?)", "%{message}" ] # [ "insert into table (message) values(?)", "%{message}" ]
config :statement, :validate => :array, :required => true config :statement, :validate => :array, :required => true
# This plugin uses the bulk index api for improved performance. # We buffer a certain number of events before flushing that out to SQL.
# To make efficient bulk insert calls, we will buffer a certain number of # This setting controls how many events will be buffered before sending a
# events before flushing that out to SQL. This setting # batch of events.
# controls how many events will be buffered before sending a batch
# of events.
config :flush_size, :validate => :number, :default => 1000 config :flush_size, :validate => :number, :default => 1000
# The amount of time since last flush before a flush is forced. # The amount of time since last flush before a flush is forced.
@ -83,17 +81,13 @@ class LogStash::Outputs::Jdbc < LogStash::Outputs::Base
end end
begin begin
@logger.debug("Sending SQL to server", :event => event, :sql => statement.toString()) @logger.debug("Sending SQL to server", :sql => statement.toString())
statement.executeBatch() statement.executeBatch()
rescue Exception => e rescue => e
@logger.error("JDBC Exception", :exception => e)
# Raising an exception will incur a retry from Stud::Buffer. # Raising an exception will incur a retry from Stud::Buffer.
# Since the executeBatch failed this should mean any events failed to be # Since the executeBatch failed this should mean any events failed to be
# inserted will be re-run. # inserted will be re-run. We're going to log it for the lols anyway.
# We're only capturing the exception so we can pass it to the logger, log @logger.error("JDBC Exception", :exception => e)
# it and then re-raise it.
raise Exception.new("JDBC - Flush failed - #{e.message}")
end end
statement.close() statement.close()