Compare commits
61 Commits
| SHA1 |
|---|
| 667e066d74 |
| 8e15bc5f45 |
| 43142287de |
| 2a6f048fa0 |
| 318c1bd86a |
| 3085606eb7 |
| f0d88a237f |
| ca1c71ea68 |
| cb4cefdfad |
| 44e1947f31 |
| 64a6bcfd55 |
| 238ef153e4 |
| 3bdd8ef3a8 |
| 9164605aae |
| d2f99b05d2 |
| c110bdd551 |
| b3a6de6340 |
| 37631a62b7 |
| 76e0f439a0 |
| 43eb5d969d |
| 0f37792177 |
| 0e2e883cd1 |
| 34708157f4 |
| 6c852d21dc |
| 53eaee001d |
| fe131f750e |
| f04e00019b |
| 867fd37805 |
| b3e8d1a0f8 |
| 25d14f2624 |
| ab566ee969 |
| 7d699e400c |
| 542003e4e5 |
| 290aa63d2d |
| a61dd21046 |
| 4a573cb599 |
| 80560f6692 |
| 2c00c5d016 |
| a26b6106d4 |
| d6869f594c |
| 5ec985b0df |
| 5a1fdb7c7f |
| b14d61ccf0 |
| baaeba3c07 |
| d362e791e5 |
| 5f0f897114 |
| 721e128f29 |
| fe2e23ac27 |
| 85b3f31051 |
| e32b6e9bbd |
| f1202f6454 |
| 26a32079f1 |
| 8d27e0f90d |
| df811f3d29 |
| d056093ab8 |
| e83af287f0 |
| 0ff6f16ec7 |
| e6e9ac3b04 |
| 707c005979 |
| 8f5ceb451a |
| e6537d053f |
.github/ISSUE_TEMPLATE.md (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
<!--

Trouble installing the plugin under Logstash 2.4.0 with the message "duplicate gems"? See https://github.com/elastic/logstash/issues/5852

Please remember:
- I have not used every database engine in the world
- I have not got access to every database engine in the world
- Any support I provide is done in my own personal time which is limited
- Understand that I won't always have the answer immediately

Please provide as much information as possible.

-->

<!--- Provide a general summary of the issue in the Title above -->

## Expected & Actual Behavior
<!--- If you're describing a bug, tell us what should happen, and what is actually happening, and if necessary how to reproduce it -->
<!--- If you're suggesting a change/improvement, tell us how it should work -->

## Your Environment
<!--- Include as many relevant details about the environment you experienced the bug in -->
* Version of plugin used:
* Version of Logstash used:
* Database engine & version you're connecting to:
* Have you checked you've met the Logstash requirements for Java versions?:
.rubocop.yml (new file, 25 lines)
@@ -0,0 +1,25 @@
# I don't care for underscores in numbers.
Style/NumericLiterals:
  Enabled: false

Style/ClassAndModuleChildren:
  Enabled: false

Metrics/AbcSize:
  Enabled: false

Metrics/CyclomaticComplexity:
  Max: 9

Metrics/PerceivedComplexity:
  Max: 10

Metrics/LineLength:
  Enabled: false

Metrics/MethodLength:
  Max: 50

Style/FileName:
  Exclude:
    - 'lib/logstash-output-jdbc_jars.rb'
.travis.yml
@@ -2,7 +2,9 @@ sudo: required
 language: ruby
 cache: bundler
 rvm:
-  - jruby
+  - jruby-1.7.25
+jdk:
+  - oraclejdk8
 before_script:
   - bundle exec rake vendor
   - bundle exec rake install_jars
README.md
@@ -21,7 +21,7 @@ See CHANGELOG.md
 Released versions are available via rubygems, and typically tagged.
 
 For development:
-  - See master branch for logstash v5 (currently **development only**)
+  - See master branch for logstash v5
   - See v2.x branch for logstash v2
   - See v1.5 branch for logstash v1.5
   - See v1.4 branch for logstash 1.4
README.md
@@ -47,8 +47,8 @@ For development:
 | username | String | JDBC username - this is optional as it may be included in the connection string, for many drivers | No | |
 | password | String | JDBC password - this is optional as it may be included in the connection string, for many drivers | No | |
 | statement | Array | An array of strings representing the SQL statement to run. Index 0 is the SQL statement that is prepared, all other array entries are passed in as parameters (in order). A parameter may either be a property of the event (i.e. "@timestamp", or "host") or a formatted string (i.e. "%{host} - %{message}" or "%{message}"). If a key is passed then it will be automatically converted as required for insertion into SQL. If it's a formatted string then it will be passed in verbatim. | Yes | |
-| unsafe_statement | Boolean | If yes, the statement is evaluated for event fields - this allows you to use dynamic table names, etc. **This is highly dangerous** and you should **not** use this unless you are 100% sure that the field(s) you are passing in are 100% safe. Failure to do so will result in possible SQL injections. Please be aware that there is also a potential performance penalty as each event must be evaluated and inserted into SQL one at a time, where as when this is false multiple events are inserted at once. Example statement: [ "insert into %{table_name_field} (column) values(?)", "fieldname" ] | No | False |
+| unsafe_statement | Boolean | If yes, the statement is evaluated for event fields - this allows you to use dynamic table names, etc. **This is highly dangerous** and you should **not** use this unless you are 100% sure that the field(s) you are passing in are 100% safe. Failure to do so will result in possible SQL injections. Example statement: [ "insert into %{table_name_field} (column) values(?)", "fieldname" ] | No | False |
-| max_pool_size | Number | Maximum number of connections to open to the SQL server at any 1 time. Default set to same as Logstash default number of workers | No | 24 |
+| max_pool_size | Number | Maximum number of connections to open to the SQL server at any 1 time | No | 5 |
 | connection_timeout | Number | Number of seconds before a SQL connection is closed | No | 2800 |
 | flush_size | Number | Maximum number of entries to buffer before sending to SQL - if this is reached before idle_flush_time | No | 1000 |
 | max_flush_exceptions | Number | Number of sequential flushes which cause an exception, before the set of events are discarded. Set to a value less than 1 if you never want it to stop. This should be carefully configured with respect to retry_initial_interval and retry_max_interval, if your SQL server is not highly available | No | 10 |
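For illustration only, the options in the table above can be combined into a single `jdbc` output block. The sketch below is not taken from this repository's examples: the connection string, table name and column names are placeholders, and the numeric values simply restate the documented defaults.

```
output {
  jdbc {
    # Placeholder connection details - adjust for your own database and driver.
    connection_string => "jdbc:postgresql://127.0.0.1:5432/logs?user=logstash"
    statement => [ "INSERT INTO log (host, created_at, message) VALUES(?, CAST(? AS timestamp), ?)", "host", "@timestamp", "message" ]
    # Keep unsafe_statement false unless every interpolated field is trusted.
    unsafe_statement => false
    # The values below restate the documented defaults from the table above.
    max_pool_size => 5
    connection_timeout => 2800
    flush_size => 1000
    max_flush_exceptions => 10
  }
}
```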
Vagrantfile (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :

Vagrant.configure(2) do |config|
  config.vm.box = 'debian/jessie64'
  config.vm.synced_folder '.', '/vagrant', type: :virtualbox

  config.vm.provision 'shell', inline: <<-EOP
    echo "deb http://ftp.debian.org/debian jessie-backports main" | tee --append /etc/apt/sources.list > /dev/null
    sed -i 's/main/main contrib non-free/g' /etc/apt/sources.list
    apt-get update
    apt-get remove openjdk-7-jre-headless -y -q
    apt-get install git openjdk-8-jre curl -y -q
    gpg --keyserver hkp://keys.gnupg.net --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3
    curl -sSL https://get.rvm.io | bash -s stable --ruby=jruby-1.7
    usermod -a -G rvm vagrant
  EOP
end
examples/cockroachdb.md (new file, 18 lines)
@@ -0,0 +1,18 @@
# Example: CockroachDB
- Tested using postgresql-9.4.1209.jre6.jar
- **Warning** cockroach is known to throw a warning on connection test (at time of writing), thus the connection test is explicitly disabled.

```
input
{
  stdin { }
}
output {
  jdbc {
    driver_jar_path => '/opt/postgresql-9.4.1209.jre6.jar'
    connection_test => false
    connection_string => 'jdbc:postgresql://127.0.0.1:26257/test?user=root'
    statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, CAST (? AS timestamp), ?)", "host", "@timestamp", "message" ]
  }
}
```
examples/mysql.md
@@ -9,8 +9,9 @@ input
 }
 output {
   jdbc {
+    driver_class => "com.mysql.jdbc.Driver"
     connection_string => "jdbc:mysql://HOSTNAME/DATABASE?user=USER&password=PASSWORD"
-    statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, CAST (? AS timestamp), ?)", "host", "@timestamp", "message" ]
+    statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, CAST(? AS timestamp), ?)", "host", "@timestamp", "message" ]
   }
 }
 ```
examples/sql-server.md
@@ -1,5 +1,6 @@
 # Example: SQL Server
 * Tested using http://msdn.microsoft.com/en-gb/sqlserver/aa937724.aspx
+* Known to be working with Microsoft SQL Server Always-On Cluster (see https://github.com/theangryangel/logstash-output-jdbc/issues/37). With thanks to [@phr0gz](https://github.com/phr0gz)
 ```
 input
 {
@@ -7,7 +8,7 @@ input
 }
 output {
   jdbc {
-    connection_string => "jdbc:sqlserver://server:1433;databaseName=databasename;user=username;password=password;autoReconnect=true;"
+    connection_string => "jdbc:sqlserver://server:1433;databaseName=databasename;user=username;password=password"
     statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, ?, ?)", "host", "@timestamp", "message" ]
   }
 }
examples/sqlite.md
@@ -10,6 +10,7 @@ output {
   stdout { }
 
   jdbc {
+    driver_class => "org.sqlite.JDBC"
     connection_string => 'jdbc:sqlite:test.db'
     statement => [ "INSERT INTO log (host, timestamp, message) VALUES(?, ?, ?)", "host", "@timestamp", "message" ]
   }
lib/logstash-output-jdbc_jars.rb
@@ -1,5 +1,5 @@
 # encoding: utf-8
 require 'logstash/environment'
 
-root_dir = File.expand_path(File.join(File.dirname(__FILE__), ".."))
+root_dir = File.expand_path(File.join(File.dirname(__FILE__), '..'))
-LogStash::Environment.load_runtime_jars! File.join(root_dir, "vendor")
+LogStash::Environment.load_runtime_jars! File.join(root_dir, 'vendor')
lib/logstash/outputs/jdbc.rb
@@ -12,7 +12,7 @@ require 'logstash-output-jdbc_jars'
 # includes correctly crafting the SQL statement, and matching the number of
 # parameters correctly.
 class LogStash::Outputs::Jdbc < LogStash::Outputs::Base
-  declare_threadsafe! if self.respond_to?(:declare_threadsafe!)
+  concurrency :shared
 
   STRFTIME_FMT = '%Y-%m-%d %T.%L'.freeze
 
@@ -98,7 +98,6 @@ class LogStash::Outputs::Jdbc < LogStash::Outputs::Base
   def register
     @logger.info('JDBC - Starting up')
 
-    LogStash::Logger.setup_log4j(@logger)
     load_jar_files!
 
     @stopping = Concurrent::AtomicBoolean.new(false)
@@ -122,10 +121,6 @@ class LogStash::Outputs::Jdbc < LogStash::Outputs::Base
     end
   end
 
-  def receive(event)
-    retrying_submit([event])
-  end
-
   def close
     @stopping.make_true
     @pool.close
@@ -156,7 +151,7 @@ class LogStash::Outputs::Jdbc < LogStash::Outputs::Base
     # Test connection
     test_connection = @pool.getConnection
     unless test_connection.isValid(validate_connection_timeout)
-      @logger.error('JDBC - Connection is not reporting as validate. Either connection is invalid, or driver is not getting the appropriate response.')
+      @logger.warn('JDBC - Connection is not reporting as validate. Either connection is invalid, or driver is not getting the appropriate response.')
     end
     test_connection.close
   end
@@ -177,13 +172,13 @@ class LogStash::Outputs::Jdbc < LogStash::Outputs::Base
       File.join(File.dirname(__FILE__), '../../../vendor/jar/jdbc/*.jar')
     end
 
-    @logger.debug('JDBC - jarpath', path: jarpath)
+    @logger.trace('JDBC - jarpath', path: jarpath)
 
     jars = Dir[jarpath]
    raise LogStash::ConfigurationError, 'JDBC - No jars found. Have you read the README?' if jars.empty?
 
    jars.each do |jar|
-      @logger.debug('JDBC - Loaded jar', jar: jar)
+      @logger.trace('JDBC - Loaded jar', jar: jar)
      require jar
    end
  end
@@ -258,7 +253,7 @@ class LogStash::Outputs::Jdbc < LogStash::Outputs::Base
   def add_statement_event_params(statement, event)
     @statement[1..-1].each_with_index do |i, idx|
       if i.is_a? String
-        value = event[i]
+        value = event.get(i)
         if value.nil? and i =~ /%\{/
           value = event.sprintf(i)
         end
@@ -280,7 +275,11 @@ class LogStash::Outputs::Jdbc < LogStash::Outputs::Base
         # strftime appears to be the most reliable across drivers.
         statement.setString(idx + 1, value.time.strftime(STRFTIME_FMT))
       when Fixnum, Integer
+        if value > 2147483647 or value < -2147483648
+          statement.setLong(idx + 1, value)
+        else
           statement.setInt(idx + 1, value)
+        end
       when Float
         statement.setFloat(idx + 1, value)
       when String
@@ -308,7 +307,7 @@ class LogStash::Outputs::Jdbc < LogStash::Outputs::Base
     log_method = (retrying ? 'warn' : 'error')
 
     loop do
-      @logger.send(log_method, log_text, :exception => current_exception, :backtrace => current_exception.backtrace)
+      @logger.send(log_method, log_text, :exception => current_exception)
 
       if current_exception.respond_to? 'getNextException'
         current_exception = current_exception.getNextException()
log4j2.xml (new file, 17 lines)
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
  <Appenders>
    <File name="file" fileName="log4j2.log">
      <PatternLayout pattern="%d{yyyy-mm-dd HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
    </File>
  </Appenders>

  <Loggers>
    <!-- If we need to figure out whats happening for development purposes, disable this -->
    <Logger name="com.zaxxer.hikari" level="off" />

    <Root level="debug">
      <AppenderRef ref="file"/>
    </Root>
  </Loggers>
</Configuration>
logstash-output-jdbc.gemspec
@@ -1,13 +1,13 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-jdbc'
-  s.version = "0.3.1"
+  s.version = '5.0.0'
-  s.licenses = [ "Apache License (2.0)" ]
+  s.licenses = ['Apache License (2.0)']
-  s.summary = "This plugin allows you to output to SQL, via JDBC"
+  s.summary = 'This plugin allows you to output to SQL, via JDBC'
-  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
+  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install 'logstash-output-jdbc'. This gem is not a stand-alone program"
-  s.authors = ["the_angry_angel"]
+  s.authors = ['the_angry_angel']
-  s.email = "karl+github@theangryangel.co.uk"
+  s.email = 'karl+github@theangryangel.co.uk'
-  s.homepage = "https://github.com/theangryangel/logstash-output-jdbc"
+  s.homepage = 'https://github.com/theangryangel/logstash-output-jdbc'
-  s.require_paths = [ "lib" ]
+  s.require_paths = ['lib']
 
   # Java only
   s.platform = 'java'
@@ -19,14 +19,14 @@ Gem::Specification.new do |s|
   s.test_files = s.files.grep(%r{^(test|spec|features)/})
 
   # Special flag to let us know this is actually a logstash plugin
-  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }
+  s.metadata = { 'logstash_plugin' => 'true', 'logstash_group' => 'output' }
 
   # Gem dependencies
-  s.add_runtime_dependency 'logstash-core-plugin-api', '~> 1.0'
+  s.add_runtime_dependency 'logstash-core-plugin-api', '>= 1.60', '<= 2.99'
   s.add_runtime_dependency 'stud'
   s.add_runtime_dependency 'logstash-codec-plain'
 
-  s.requirements << "jar 'com.zaxxer:HikariCP', '2.4.2'"
+  s.requirements << "jar 'com.zaxxer:HikariCP', '2.4.7'"
   s.requirements << "jar 'org.slf4j:slf4j-log4j12', '1.7.21'"
 
   s.add_development_dependency 'jar-dependencies'
scripts/minutes_to_retries.rb (new executable file, 19 lines)
@@ -0,0 +1,19 @@
#!/usr/bin/env ruby -w

seconds_to_reach = 10 * 60
retry_max_interval = 128

current_interval = 2
total_interval = 0
exceptions_count = 1

loop do
  break if total_interval > seconds_to_reach
  exceptions_count += 1

  current_interval = current_interval * 2 > retry_max_interval ? retry_max_interval : current_interval * 2

  total_interval += current_interval
end

puts exceptions_count
@@ -4,6 +4,32 @@ require 'stud/temporary'
 require 'java'
 require 'securerandom'
 
+RSpec.configure do |c|
+
+  def start_service(name)
+    cmd = "sudo /etc/init.d/#{name}* start"
+
+    `which systemctl`
+    if $?.success?
+      cmd = "sudo systemctl start #{name}"
+    end
+
+    `#{cmd}`
+  end
+
+  def stop_service(name)
+    cmd = "sudo /etc/init.d/#{name}* stop"
+
+    `which systemctl`
+    if $?.success?
+      cmd = "sudo systemctl stop #{name}"
+    end
+
+    `#{cmd}`
+  end
+
+end
+
 RSpec.shared_context 'rspec setup' do
   it 'ensure jar is available' do
     expect(ENV[jdbc_jar_env]).not_to be_nil, "#{jdbc_jar_env} not defined, required to run tests"
@@ -20,7 +46,9 @@ RSpec.shared_context 'when initializing' do
 end
 
 RSpec.shared_context 'when outputting messages' do
-  let(:logger) { double("logger") }
+  let(:logger) {
+    double("logger")
+  }
 
   let(:jdbc_test_table) do
     'logstash_output_jdbc_test'
@@ -31,11 +59,11 @@ RSpec.shared_context 'when outputting messages' do
   end
 
   let(:jdbc_create_table) do
-    "CREATE table #{jdbc_test_table} (created_at datetime not null, message varchar(512) not null, message_sprintf varchar(512) not null, static_int int not null, static_bit bit not null)"
+    "CREATE table #{jdbc_test_table} (created_at datetime not null, message varchar(512) not null, message_sprintf varchar(512) not null, static_int int not null, static_bit bit not null, static_bigint bigint not null)"
   end
 
   let(:jdbc_statement) do
-    ["insert into #{jdbc_test_table} (created_at, message, message_sprintf, static_int, static_bit) values(?, ?, ?, ?, ?)", '@timestamp', 'message', 'sprintf-%{message}', 1, true]
+    ["insert into #{jdbc_test_table} (created_at, message, message_sprintf, static_int, static_bit, static_bigint) values(?, ?, ?, ?, ?, ?)", '@timestamp', 'message', 'sprintf-%{message}', 1, true, 4000881632477184]
   end
 
   let(:systemd_database_service) do
@@ -43,16 +71,27 @@ RSpec.shared_context 'when outputting messages' do
   end
 
   let(:event_fields) do
-    { 'message' => "test-message #{SecureRandom.uuid}" }
+    { message: "test-message #{SecureRandom.uuid}" }
   end
 
   let(:event) { LogStash::Event.new(event_fields) }
 
   let(:plugin) do
+    # Setup logger
+    allow(LogStash::Outputs::Jdbc).to receive(:logger).and_return(logger)
+
+    # XXX: Suppress reflection logging. There has to be a better way around this.
+    allow(logger).to receive(:debug).with(/config LogStash::/)
+
+    # Suppress beta warnings.
+    allow(logger).to receive(:info).with(/Please let us know if you find bugs or have suggestions on how to improve this plugin./)
+
+    # Suppress start up messages.
+    expect(logger).to receive(:info).once.with(/JDBC - Starting up/)
+
     # Setup plugin
     output = LogStash::Plugin.lookup('output', 'jdbc').new(jdbc_settings)
     output.register
-    output.logger = logger
 
     # Setup table
     c = output.instance_variable_get(:@pool).getConnection
@@ -82,7 +121,7 @@ RSpec.shared_context 'when outputting messages' do
     # Verify the number of items in the output table
     c = plugin.instance_variable_get(:@pool).getConnection
     stmt = c.prepareStatement("select count(*) as total from #{jdbc_test_table} where message = ?")
-    stmt.setString(1, event['message'])
+    stmt.setString(1, event.get('message'))
     rs = stmt.executeQuery
     count = 0
     count = rs.getInt('total') while rs.next
@@ -93,10 +132,14 @@ RSpec.shared_context 'when outputting messages' do
   end
 
   it 'should not save event, and log an unretryable exception' do
-    e = LogStash::Event.new({})
+    e = event
+    original_event = e.get('message')
+    e.set('message', nil)
+
     expect(logger).to receive(:error).once.with(/JDBC - Exception. Not retrying/, Hash)
-    expect { plugin.multi_receive([e]) }.to_not raise_error
+    expect { plugin.multi_receive([event]) }.to_not raise_error
 
+    e.set('message', original_event)
   end
 
   it 'it should retry after a connection loss, and log a warning' do
@@ -107,29 +150,21 @@ RSpec.shared_context 'when outputting messages' do
     # Check that everything is fine right now
     expect { p.multi_receive([event]) }.not_to raise_error
 
-    # Start a thread to stop and restart the service.
+    stop_service(systemd_database_service)
 
+    # Start a thread to restart the service after the fact.
     t = Thread.new(systemd_database_service) { |systemd_database_service|
-      start_stop_cmd = 'sudo /etc/init.d/%<service>s* %<action>s'
+      sleep 20
 
-      `which systemctl`
-      if $?.success?
-        start_stop_cmd = 'sudo systemctl %<action>s %<service>s'
-      end
-
-      cmd = start_stop_cmd % { action: 'stop', service: systemd_database_service }
-      `#{cmd}`
-      sleep 10
-
-      cmd = start_stop_cmd % { action: 'start', service: systemd_database_service }
-      `#{cmd}`
+      start_service(systemd_database_service)
     }
 
-    # Wait a few seconds to the service to stop
-    sleep 5
+    t.run
+
     expect(logger).to receive(:warn).at_least(:once).with(/JDBC - Exception. Retrying/, Hash)
     expect { p.multi_receive([event]) }.to_not raise_error
 
+    # Wait for the thread to finish
     t.join
   end
 end
@@ -10,7 +10,7 @@ describe 'logstash-output-jdbc: derby', if: ENV['JDBC_DERBY_JAR'] do
   end
 
   let(:jdbc_create_table) do
-    "CREATE table #{jdbc_test_table} (created_at timestamp not null, message varchar(512) not null, message_sprintf varchar(512) not null, static_int int not null, static_bit boolean not null)"
+    "CREATE table #{jdbc_test_table} (created_at timestamp not null, message varchar(512) not null, message_sprintf varchar(512) not null, static_int int not null, static_bit boolean not null, static_bigint bigint not null)"
   end
 
   let(:jdbc_settings) do