Initial Commit

This commit is contained in:
Mark Milligan
2021-01-14 16:28:24 -06:00
parent 21c28201c5
commit 1334c110ff
318 changed files with 24160 additions and 0 deletions

View File

@@ -0,0 +1,65 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.lanternsoftware.util</groupId>
<artifactId>lantern-util-common</artifactId>
<name>lantern-util-common</name>
<version>1.0.0</version>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.4</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.8</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.29</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.2</version>
<executions>
<execution>
<goals>
<goal>testCompile</goal>
</goals>
<phase>compile</phase>
</execution>
</executions>
<configuration>
<optimize>true</optimize>
<showDeprecation>true</showDeprecation>
<encoding>UTF-8</encoding>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,935 @@
package com.lanternsoftware.util;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.RandomAccess;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicInteger;
public class CollectionUtils {
/**
 * Returns the first element produced by the collection's iterator.
 * @param _collObjects the collection to read from; may be null
 * @return the first element, or null if the collection is null or empty
 */
public static <T> T getFirst(Collection<T> _collObjects) {
	if ((_collObjects == null) || _collObjects.isEmpty())
		return null;
	return _collObjects.iterator().next();
}
public static <T> T removeFirst(Collection<T> _objects) {
if (_objects == null)
return null;
Iterator<T> iter = _objects.iterator();
if (iter.hasNext()) {
T t = iter.next();
iter.remove();
return t;
}
return null;
}
public static <T> T removeOne(Collection<T> _coll, IQualifier<T> _qualifier) {
if ((_coll == null) || (_qualifier == null))
return null;
Iterator<T> iter = _coll.iterator();
while (iter.hasNext()) {
T t = iter.next();
if (_qualifier.qualifies(t)) {
iter.remove();
return t;
}
}
return null;
}
public static <T> List<T> removeAll(Collection<T> _coll, IQualifier<T> _qualifier) {
if ((_coll == null) || (_qualifier == null))
return null;
List<T> ret = new ArrayList<>();
Iterator<T> iter = _coll.iterator();
while (iter.hasNext()) {
T t = iter.next();
if (_qualifier.qualifies(t)) {
iter.remove();
ret.add(t);
}
}
return ret;
}
public static <T> T getLast(Collection<T> _collObjects) {
if ((_collObjects == null) || _collObjects.isEmpty())
return null;
if (_collObjects instanceof RandomAccess) {
List<T> listObjects = (List<T>) _collObjects;
return listObjects.get(listObjects.size() - 1);
}
if (_collObjects instanceof Deque) {
Deque<T> listObjects = (Deque<T>) _collObjects;
return listObjects.getLast();
}
T t = null;
Iterator<T> iter = _collObjects.iterator();
while (iter.hasNext()) {
t = iter.next();
}
return t;
}
public static <T> T getFirst(T[] _arrObjects) {
if (size(_arrObjects) > 0)
return _arrObjects[0];
return null;
}
public static <T> boolean isEmpty(Collection<T> _collObjects) {
return (_collObjects == null) || _collObjects.isEmpty();
}
public static <T> boolean isNotEmpty(Collection<T> _collObjects) {
return (_collObjects != null) && !_collObjects.isEmpty();
}
public static <T,V> boolean isEmpty(Map<T,V> _map) {
return (_map == null) || _map.isEmpty();
}
public static <T,V> boolean isNotEmpty(Map<T,V> _map) {
return (_map != null) && !_map.isEmpty();
}
public static <T> Collection<T> makeNotNull(Collection<T> _collObjects) {
if (_collObjects != null)
return _collObjects;
return new ArrayList<T>();
}
public static <T> List<T> makeNotNull(List<T> _listObjects) {
if (_listObjects != null)
return _listObjects;
return new ArrayList<T>();
}
public static <T, V> Map<T, V> makeNotNull(Map<T, V> _mapObjects) {
if (_mapObjects != null)
return _mapObjects;
return new HashMap<T, V>();
}
public static int size(Collection<?> _collObjects) {
if (_collObjects == null)
return 0;
return _collObjects.size();
}
public static <T,V> int size(Map<T, V> _collObjects) {
if (_collObjects == null)
return 0;
return _collObjects.size();
}
public static <T> int size(T[] _arr) {
if (_arr == null)
return 0;
return _arr.length;
}
public static <T> T get(List<T> _list, int _idx) {
if (_list == null)
return null;
if ((_idx < 0) || (_idx >= _list.size()))
return null;
return _list.get(_idx);
}
public static <T> T get(T[] _t, int _idx) {
if (_t == null)
return null;
if ((_idx < 0) || (_idx >= _t.length))
return null;
return _t[_idx];
}
public static <T, V> List<V> get(Map<T, V> _map, Collection<T> _keys) {
if (_keys == null)
return null;
List<V> ret = new ArrayList<>();
for (T t : _keys) {
ret.add(_map.get(t));
}
return ret;
}
public static <T> T last(T[] _arr) {
if (_arr == null || _arr.length == 0)
return null;
return _arr[_arr.length - 1];
}
public static <T> boolean contains(Collection<T> _coll, T _t) {
if (_coll == null)
return false;
return _coll.contains(_t);
}
public static <T> boolean containsAny(Collection<T> _coll, T... _t) {
return containsAny(_coll, asArrayList(_t));
}
public static <T> boolean containsAny(Collection<T> _coll, Collection<T> _values) {
if (size(_values) == 0)
return false;
for (T t : _values) {
if (contains(_coll, t))
return true;
}
return false;
}
public static <T> boolean containsAll(Collection<T> _coll, T... _t) {
return containsAll(_coll, asArrayList(_t));
}
public static <T> boolean containsAll(Collection<T> _coll, Collection<T> _values) {
if (size(_values) == 0)
return true;
for (T t : _values) {
if (!contains(_coll, t))
return false;
}
return true;
}
public static <T> boolean containsNone(Collection<T> _coll, T... _t) {
return containsNone(_coll, asArrayList(_t));
}
public static <T> boolean containsNone(Collection<T> _coll, Collection<T> _values) {
if (size(_values) == 0)
return true;
for (T t : _values) {
if (contains(_coll, t))
return false;
}
return true;
}
public static <T> List<T> merge(Collection<List<T>> _colls) {
List<T> list = new ArrayList<>();
for (List<? extends T> coll : makeNotNull(_colls)) {
list.addAll(coll);
}
return list;
}
public static <T> List<T> merge(Collection<? extends T> _coll1, Collection<? extends T> _coll2) {
List<T> list = new ArrayList<>(makeNotNull(_coll1));
list.addAll(makeNotNull(_coll2));
return list;
}
public static <T, V> List<V> aggregate(Collection<T> _coll, IAggregator<T, V> _aggregator) {
List<V> list = new ArrayList<>();
for (T t : makeNotNull(_coll)) {
List<V> vs = _aggregator.aggregate(t);
if (vs != null)
list.addAll(vs);
}
return list;
}
public static <T, V, U> Map<U, V> aggregateToMap(Collection<T> _coll, IAggregator<T, V> _aggregator, ITransformer<V, U> _keyTransformer) {
return transformToMap(aggregate(_coll, _aggregator), _keyTransformer);
}
/**
 * Concatenates any number of byte arrays into a single new array.
 * Null entries (and a null varargs argument) are treated as empty.
 * @param _arrs the arrays to concatenate, in order; may be null or contain nulls
 * @return a new array holding the contents of all non-null inputs, never null
 */
public static byte[] merge(byte[]... _arrs) {
	// Guard against merge((byte[][]) null), which previously threw an NPE
	// in the sizing loop; all other nulls were already tolerated.
	if (_arrs == null)
		return new byte[0];
	int iSize = 0;
	for (byte[] curArr : _arrs) {
		if (curArr != null)
			iSize += curArr.length;
	}
	byte[] arr = new byte[iSize];
	int offset = 0;
	for (byte[] curArr : _arrs) {
		if (curArr == null)
			continue;
		System.arraycopy(curArr, 0, arr, offset, curArr.length);
		offset += curArr.length;
	}
	return arr;
}
public static <T, V> List<V> getMultiMapList(T _key, Map<T, List<V>> _map) {
List<V> list = _map.get(_key);
if (list == null) {
list = new ArrayList<>();
_map.put(_key, list);
}
return list;
}
public static <T, V> Set<V> getMultiMapSet(T _key, Map<T, Set<V>> _map) {
Set<V> set = _map.get(_key);
if (set == null) {
set = new HashSet<>();
_map.put(_key, set);
}
return set;
}
public static <T, V> List<V> addToMultiMap(T _key, V _value, Map<T, List<V>> _map) {
List<V> list = getMultiMapList(_key, _map);
list.add(_value);
return list;
}
public static <T, V> Set<V> addToMultiMapSet(T _key, V _value, Map<T, Set<V>> _map) {
Set<V> set = getMultiMapSet(_key, _map);
set.add(_value);
return set;
}
/**
 * Sums all non-null Integer values in the collection.
 * @param _coll the values to sum; may be null or contain null entries
 * @return the sum of the non-null values, or 0 for a null or empty collection
 */
public static int sumIntegers(Collection<Integer> _coll) {
	int sum = 0;
	if (_coll == null)
		return sum;
	for (Integer val : _coll) {
		if (val != null)
			sum += val;
	}
	return sum;
}
public static Double sum(Collection<Double> _coll) {
double sum = 0.0;
for (Double val : makeNotNull(_coll)) {
if (val != null)
sum += val;
}
return sum;
}
public static Double mean(Collection<Double> _coll) {
int cnt = 0;
double total = 0.0;
for (Double val : makeNotNull(_coll)) {
if (val != null) {
cnt++;
total += val;
}
}
if (cnt == 0)
return 0.0;
return total / cnt;
}
public static Double variance(Collection<Double> _coll) {
double mean = mean(_coll);
int cnt = 0;
double total = 0.0;
for (Double val : makeNotNull(_coll)) {
if (val != null) {
cnt++;
total += (val - mean) * (val - mean);
}
}
if (cnt == 0)
return 0.0;
return total / cnt;
}
public static Double standardDeviation(Collection<Double> _coll) {
return Math.sqrt(variance(_coll));
}
/**
 * Splits a list into consecutive sublists of at most {@code _size} elements each.
 * The returned pieces are views backed by the original list (no copies).
 * @param _list the list to split; may be null
 * @param _size the maximum size of each piece; must be positive
 * @return the pieces in order, or an empty list if {@code _list} is null or empty
 */
public static <T> List<List<T>> split(List<T> _list, int _size) {
	if ((_list == null) || _list.isEmpty())
		return Collections.emptyList();
	// Round up so a final partial piece is included. The original computed
	// (size / _size) + 1, which appended a spurious empty piece whenever
	// _size divided the list length evenly.
	int iPieces = ((_list.size() - 1) / _size) + 1;
	List<List<T>> list = new ArrayList<>(iPieces);
	for (int i = 0; i < iPieces; i++) {
		list.add(_list.subList(i * _size, Math.min(_list.size(), (i + 1) * _size)));
	}
	return list;
}
public static <T> List<List<T>> splitEvenly(List<T> _list, int _maxSize) {
if (isEmpty(_list))
return Collections.emptyList();
return splitIntoPieces(_list, ((_list.size()-1) / _maxSize) + 1);
}
public static <T> List<List<T>> splitIntoPieces(List<T> _list, int _pieces) {
return splitIntoPieces(_list, _pieces, false);
}
public static <T> List<List<T>> splitIntoPieces(List<T> _list, int _pieces, boolean _createNewLists) {
if (isEmpty(_list))
return Collections.emptyList();
if (_list.size() < _pieces)
return Collections.singletonList(_list);
int size = (int)Math.ceil(((double)_list.size())/_pieces);
List<List<T>> list = new ArrayList<>(_pieces);
int offset = 0;
while (offset < _list.size()) {
List<T> subList = _list.subList(offset, Math.min(_list.size(), offset+size));
list.add(_createNewLists?new ArrayList<>(subList):subList);
offset += size;
}
return list;
}
/**
 * Copies the given values into a new mutable ArrayList.
 * @param _values the values to copy; may be null
 * @return a new ArrayList containing the values in order (empty if {@code _values} is null)
 */
public static <T> ArrayList<T> asArrayList(T... _values) {
	if (_values == null)
		return new ArrayList<>(0);
	ArrayList<T> list = new ArrayList<>(_values.length);
	Collections.addAll(list, _values);
	return list;
}
public static <T> HashSet<T> asHashSet(T... _values) {
HashSet<T> setValues = new HashSet<>();
if (_values == null)
return setValues;
for (T t : _values)
setValues.add(t);
return setValues;
}
public static <K, V> HashMap<K, V> asHashMap(K _key, V _value) {
HashMap<K, V> map = new HashMap<>();
map.put(_key, _value);
return map;
}
public static <T> ArrayList<T> asArrayList(Iterable<T> _iterable) {
if (_iterable == null)
return new ArrayList<T>(0);
ArrayList<T> list = new ArrayList<>();
for (T t : _iterable)
list.add(t);
return list;
}
public static <T> ArrayList<T> asArrayList(Iterator<T> _iter) {
if (_iter == null)
return new ArrayList<T>(0);
ArrayList<T> list = new ArrayList<>();
while (_iter.hasNext())
list.add(_iter.next());
return list;
}
public static <T> HashSet<T> asHashSet(Iterable<T> _iterable) {
HashSet<T> setValues = new HashSet<>();
if (_iterable == null)
return setValues;
for (T t : _iterable)
setValues.add(t);
return setValues;
}
public static <T> HashSet<T> asHashSet(Iterator<T> _iter) {
HashSet<T> setValues = new HashSet<>();
if (_iter == null)
return setValues;
while (_iter.hasNext())
setValues.add(_iter.next());
return setValues;
}
public static <T> boolean allQualify(Collection<T> _coll, IQualifier<T> _qualifier) {
if ((_coll == null) || (_qualifier == null))
return false;
for (T t : _coll) {
if ((t == null) || !_qualifier.qualifies(t))
return false;
}
return true;
}
public static <T> boolean anyQualify(Collection<T> _coll, IQualifier<T> _qualifier) {
if ((_coll == null) || (_qualifier == null))
return false;
for (T t : _coll) {
if ((t != null) && _qualifier.qualifies(t))
return true;
}
return false;
}
public static <T> boolean noneQualify(Collection<T> _coll, IQualifier<T> _qualifier) {
if ((_coll == null) || (_qualifier == null))
return true;
for (T t : _coll) {
if ((t != null) && _qualifier.qualifies(t))
return false;
}
return true;
}
public static <T> List<T> filter(Collection<? extends T> _coll, IFilter<T> _filter) {
if ((_coll == null) || (_filter == null))
return new ArrayList<>();
List<T> listValues = new ArrayList<>();
for (T t : _coll) {
if (_filter.isFiltered(t))
listValues.add(t);
}
return listValues;
}
public static <T> T filterOne(Collection<? extends T> _coll, IFilter<T> _filter) {
if ((_coll == null) || (_filter == null))
return null;
for (T t : _coll) {
if (_filter.isFiltered(t))
return t;
}
return null;
}
public static <T> int indexOf(List<? extends T> _list, IQualifier<T> _qual) {
if ((_list == null) || (_qual == null))
return -1;
int i = 0;
for (T t : _list) {
if (_qual.qualifies(t))
return i;
i++;
}
return -1;
}
public static <T> void filterMod(Iterable<? extends T> _iterable, IFilter<T> _filter) {
if ((_iterable == null) || (_filter == null))
return;
Iterator<? extends T> iter = _iterable.iterator();
while (iter.hasNext()) {
if (!_filter.isFiltered(iter.next()))
iter.remove();
}
}
public static <T, V> List<V> filterToType(Iterable<? extends T> _iterable, Class<V> _class) {
List<V> list = new ArrayList<>();
if (_iterable == null)
return list;
for (T t : _iterable) {
if (_class.isInstance(t))
list.add(_class.cast(t));
}
return list;
}
public static <T> void edit(Iterable<T> _coll, IEditor<T> _editor) {
if ((_coll == null) || (_editor == null))
return;
for (T t : _coll) {
_editor.edit(t);
}
}
public static <T, V> List<V> transform(Collection<T> _coll, ITransformer<? super T, V> _transformer) {
return transform(_coll, _transformer, false);
}
public static <T, V> List<V> transform(Iterable<T> _iter, ITransformer<? super T, V> _transformer) {
return transform(_iter, _transformer, false);
}
public static <T, V> List<V> transform(Iterable<T> _iter, ITransformer<? super T, V> _transformer, boolean _excludeNulls) {
if ((_iter == null) || (_transformer == null))
return new ArrayList<>();
List<V> listValues = new ArrayList<>();
for (T t : _iter) {
if (_excludeNulls && (t == null))
continue;
V v = _transformer.transform(t);
if (!_excludeNulls || (v != null))
listValues.add(v);
}
return listValues;
}
public static <T, V> List<V> transform(Collection<T> _coll, ITransformer<? super T, V> _transformer, boolean _excludeNulls) {
if ((_coll == null) || (_transformer == null))
return new ArrayList<>();
List<V> listValues = new ArrayList<>(_coll.size());
for (T t : _coll) {
V v = _transformer.transform(t);
if (!_excludeNulls || (v != null))
listValues.add(v);
}
return listValues;
}
@SuppressWarnings("unchecked")
public static <T, V> V[] transform(T[] _coll, ITransformer<? super T, V> _transformer, Class<V> _destType) {
V[] ret = (V[])Array.newInstance(_destType, size(_coll));
for (int i=0; i < size(_coll); i++) {
ret[i] = _transformer.transform(_coll[i]);
}
return ret;
}
public static <T, V> Map<V, T> transformToMap(Collection<T> _coll, ITransformer<? super T, V> _transformer) {
Map<V, T> mapValues = new HashMap<>();
if ((_coll == null) || (_transformer == null))
return mapValues;
for (T t : _coll) {
V v = _transformer.transform(t);
if (v != null)
mapValues.put(v, t);
}
return mapValues;
}
public static <T, V, U> Map<V, U> transformToMap(Collection<T> _coll, ITransformer<? super T, V> _keyTrans, ITransformer<? super T, U> _valTrans) {
Map<V, U> mapValues = new HashMap<>();
if ((_coll == null) || (_keyTrans == null) || (_valTrans == null))
return mapValues;
for (T t : _coll) {
V v = _keyTrans.transform(t);
U u = _valTrans.transform(t);
if ((v != null) && (u != null))
mapValues.put(v, u);
}
return mapValues;
}
public static <T, V> Map<V, List<T>> transformToMultiMap(Collection<T> _coll, ITransformer<? super T, V> _transformer) {
Map<V, List<T>> mapValues = new HashMap<>();
if ((_coll == null) || (_transformer == null))
return mapValues;
for (T t : _coll) {
V v = _transformer.transform(t);
if (v != null)
addToMultiMap(v, t, mapValues);
}
return mapValues;
}
public static <T, V, U> Map<V, List<U>> transformToMultiMap(Collection<T> _coll, ITransformer<? super T, V> _keyTrans, ITransformer<? super T, U> _valTrans) {
Map<V, List<U>> mapValues = new HashMap<>();
if ((_coll == null) || (_keyTrans == null) || (_valTrans == null))
return mapValues;
for (T t : _coll) {
V v = _keyTrans.transform(t);
U u = _valTrans.transform(t);
if ((v != null) && (u != null))
addToMultiMap(v, u, mapValues);
}
return mapValues;
}
public static <T,V> void addAllToMap(Collection<T> _coll, ITransformer<? super T, V> _keyTrans, Map<V, T> _map) {
for (T t : makeNotNull(_coll)) {
V v = _keyTrans.transform(t);
if (v != null)
_map.put(v, t);
}
}
public static <T,V> void addAllToMultiMap(Collection<T> _coll, ITransformer<? super T, V> _keyTrans, Map<V, List<T>> _map) {
for (T t : makeNotNull(_coll)) {
V v = _keyTrans.transform(t);
if (v != null)
addToMultiMap(v, t, _map);
}
}
public static <T> String transformToCommaSeparated(Collection<T> _coll, ITransformer<? super T, String> _transformer) {
return transformToCommaSeparated(_coll, _transformer, false);
}
public static <T> String transformToCommaSeparated(Collection<T> _coll, ITransformer<? super T, String> _transformer, boolean _spaceAfterComma) {
if (_transformer == null)
return null;
return commaSeparated(transform(_coll, _transformer), _spaceAfterComma);
}
public static <T> String transformAndDelimit(Collection<T> _coll, ITransformer<? super T, String> _transformer, String _delimiter) {
return transformAndDelimit(_coll, _transformer, _delimiter, false);
}
public static <T> String transformAndDelimit(Collection<T> _coll, ITransformer<? super T, String> _transformer, String _delimiter, boolean _discardEmptyValues) {
if (_transformer == null)
return null;
return delimit(transform(_coll, _transformer), _delimiter, _discardEmptyValues);
}
public static String commaSeparated(Collection<String> _values) {
return commaSeparated(_values, false);
}
public static String commaSeparated(Collection<String> _values, boolean _spaceAfterComma) {
return delimit(_values, _spaceAfterComma ? ", " : ",");
}
public static String delimit(Collection<String> _values, String _delimiter) {
return delimit(_values, _delimiter, false);
}
/**
 * Joins the given values into a single string separated by {@code _delimiter}.
 * @param _values the values to join; may be null
 * @param _delimiter the separator placed between consecutive values
 * @param _discardEmptyValues if true, null/empty strings are skipped entirely
 * @return the joined string, or null if there was nothing to join
 */
public static String delimit(Collection<String> _values, String _delimiter, boolean _discardEmptyValues) {
	StringBuilder builder = null;
	if (_values != null) {
		for (String value : _values) {
			if (_discardEmptyValues && NullUtils.isEmpty(value))
				continue;
			if (builder != null)
				builder.append(_delimiter);
			else
				builder = new StringBuilder();
			builder.append(value);
		}
	}
	return (builder == null) ? null : builder.toString();
}
public static List<String> undelimit(String _value, String _delimiter) {
return undelimit(_value, _delimiter, true);
}
public static List<String> undelimit(String _value, String _delimiter, boolean _discardEmptyValues) {
if (_value == null)
return new ArrayList<>();
return asArrayList(_discardEmptyValues?NullUtils.cleanSplit(_value, _delimiter): _value.split(_delimiter));
}
public static <T, V> Set<V> transformToSet(Collection<T> _coll, ITransformer<T, V> _transformer) {
Set<V> setValues = new HashSet<V>();
if ((_coll == null) || (_transformer == null))
return setValues;
for (T t : _coll) {
if (t != null) {
V v = _transformer.transform(t);
if (v != null)
setValues.add(v);
}
}
return setValues;
}
public static <T extends Comparable<T>> T getSmallest(Collection<T> _collObjects)
{
return getSmallest(_collObjects, new Comparator<T>()
{
@Override
public int compare(T o1, T o2)
{
return NullUtils.compare(o1, o2);
}
});
}
public static <T> T getSmallest(Collection<T> _objects, Comparator<T> _comparator)
{
if (_objects == null)
return null;
T ret = null;
for (T t : _objects)
{
if (t == null)
continue;
if ((ret == null) || (_comparator.compare(t, ret) < 0))
ret = t;
}
return ret;
}
public static <T extends Comparable<T>> List<T> getAllSmallest(Collection<T> _collObjects)
{
return getAllSmallest(_collObjects, new Comparator<T>()
{
@Override
public int compare(T o1, T o2)
{
return NullUtils.compare(o1, o2);
}
});
}
public static <T> List<T> getAllSmallest(Collection<T> _objects, Comparator<T> _comparator)
{
final List<T> ret = new ArrayList<>();
if (_objects == null)
return ret;
for (T t : _objects) {
if (t == null)
continue;
if (ret.isEmpty())
ret.add(t);
else {
int comp = _comparator.compare(t, CollectionUtils.getFirst(ret));
if (comp == 0)
ret.add(t);
else if (comp < 0) {
ret.clear();
ret.add(t);
}
}
}
return ret;
}
public static <T extends Comparable<T>> T getLargest(Collection<T> _collObjects)
{
return getLargest(_collObjects, new Comparator<T>()
{
@Override
public int compare(T o1, T o2)
{
return NullUtils.compare(o1, o2);
}
});
}
public static <T> T getLargest(Collection<T> _objects, Comparator<T> _comparator)
{
if (_objects == null)
return null;
T ret = null;
for (T t : _objects)
{
if (t == null)
continue;
if ((ret == null) || (_comparator.compare(t, ret) > 0))
ret = t;
}
return ret;
}
public static <T> List<T> getSmallest(Collection<T> _objects, Comparator<T> _comparator, int _count)
{
return getSmallest(_objects, _comparator, _count, null);
}
public static <T> List<T> getSmallest(Collection<T> _objects, Comparator<T> _comparator, int _count, IFilter<T> _filter)
{
if (_objects == null)
return null;
if (_count * 4 > _objects.size())
{
List<T> items = new ArrayList<T>();
for (T t : _objects)
{
if ((_filter == null) || !_filter.isFiltered(t))
items.add(t);
}
Collections.sort(items, _comparator);
return subList(items, 0, _count);
}
TreeMap<T, List<T>> mapReturn = new TreeMap<>(_comparator);
for (T t : _objects)
{
if ((t == null) || ((_filter != null) && _filter.isFiltered(t)))
continue;
if (mapReturn.size() < _count)
addToMultiMap(t, t, mapReturn);
else
{
Iterator<T> iter = mapReturn.descendingKeySet().iterator();
if (_comparator.compare(t, iter.next()) < 0)
{
iter.remove();
addToMultiMap(t, t, mapReturn);
}
}
}
List<T> items = new ArrayList<T>(_count);
for (List<T> list : mapReturn.values())
{
items.addAll(list);
}
return subList(items, 0, _count);
}
/**
 * Returns a view of the given range of the list, clamped to the list's bounds.
 * Unlike {@link List#subList}, out-of-range arguments never throw: a null list,
 * a negative or past-the-end {@code _fromIndex}, or an inverted range all yield
 * an empty list.
 * @param _list the list to slice; may be null
 * @param _fromIndex the inclusive start index
 * @param _toIndex the exclusive end index (clamped to the list size)
 * @return the sublist view, or a new empty list when the range is invalid
 */
public static <T> List<T> subList(List<T> _list, int _fromIndex, int _toIndex) {
	// Reject negative start indexes up front; the original forwarded them to
	// List.subList, which throws IndexOutOfBoundsException.
	if ((_list == null) || (_fromIndex < 0) || (_fromIndex > _list.size() - 1))
		return new ArrayList<T>();
	// Clamp an inverted range (toIndex < fromIndex) to an empty view instead
	// of letting subList throw IllegalArgumentException.
	return _list.subList(_fromIndex, Math.max(_fromIndex, Math.min(_toIndex, _list.size())));
}
public static <T extends Comparable<T>> T mostCommon(Collection<T> _collObjects) {
return mostCommon(_collObjects, new Comparator<T>() {
@Override
public int compare(T _o1, T _o2) {
return NullUtils.compare(_o1, _o2);
}
});
}
public static <T> T mostCommon(Collection<T> _collObjects, Comparator<? super T> _comparator) {
int iMax = 0;
Map<T, AtomicInteger> mapCounts = new TreeMap<T, AtomicInteger>(_comparator);
for (T t : makeNotNull(_collObjects)) {
AtomicInteger i = mapCounts.get(t);
if (i == null) {
mapCounts.put(t, new AtomicInteger(1));
if (iMax == 0)
iMax = 1;
} else {
if (i.incrementAndGet() > iMax)
iMax = i.intValue();
}
}
for (Entry<T, AtomicInteger> e : mapCounts.entrySet()) {
if (e.getValue().intValue() == iMax)
return e.getKey();
}
return null;
}
public static <T,V> List<V> getAll(Map<T, V> _map, Collection<T> _keys) {
List<V> ret = new ArrayList<>();
if (_map == null)
return ret;
for (T t : makeNotNull(_keys)) {
V v = _map.get(t);
if (v != null)
ret.add(v);
}
return ret;
}
public static byte[] toByteArray(Collection<Integer> _integers) {
if (isEmpty(_integers))
return null;
ByteBuffer bb = ByteBuffer.allocate(_integers.size() * 4);
for (Integer i : _integers) {
bb.putInt(i);
}
return bb.array();
}
public static List<Integer> fromByteArrayOfIntegers(byte[] _btIntegers) {
if (length(_btIntegers) > 0) {
List<Integer> auxIds = new ArrayList<>(_btIntegers.length / 4);
ByteBuffer bb = ByteBuffer.wrap(_btIntegers);
while (bb.hasRemaining()) {
auxIds.add(bb.getInt());
}
return auxIds;
}
return new ArrayList<>();
}
public static int length(byte[] _arr)
{
if (_arr == null)
return 0;
return _arr.length;
}
}

View File

@@ -0,0 +1,658 @@
package com.lanternsoftware.util;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
public abstract class DateUtils {
public static long toLong(Date _dt) {
if (_dt == null)
return Long.MIN_VALUE;
return _dt.getTime();
}
public static Date toDate(long _epochOffset) {
if (_epochOffset == Long.MIN_VALUE)
return null;
return new Date(_epochOffset);
}
public static Date millisecondsFromNow(long _milliseconds) {
return new Date(new Date().getTime() + _milliseconds);
}
public static Date secondsFromNow(long _seconds) {
return addSeconds(new Date(), _seconds);
}
public static Date minutesFromNow(int _minutes) {
return addMinutes(new Date(), _minutes);
}
public static Date hoursFromNow(int _hours) {
return addHours(new Date(), _hours);
}
public static Date daysFromNow(int _days) {
return addDays(new Date(), _days);
}
public static Date addSeconds(Date _dt, long _seconds) {
if (_dt == null)
return null;
return new Date(_dt.getTime() + _seconds * 1000L);
}
public static Date addMinutes(Date _dt, int _minutes) {
if (_dt == null)
return null;
return new Date(_dt.getTime() + _minutes * 60000L);
}
public static Date addHours(Date _dt, int _hours) {
if (_dt == null)
return null;
return new Date(_dt.getTime() + _hours * 3600000L);
}
public static Date addDays(Date _dt, int _days) {
if (_dt == null)
return null;
return new Date(_dt.getTime() + _days * 86400000L);
}
public static Date addDays(Date _dt, int _days, TimeZone _tz) {
if (_dt == null)
return null;
Calendar cal = toCalendar(_dt, _tz);
cal.add(Calendar.DAY_OF_YEAR, _days);
return cal.getTime();
}
/**
 * Returns a new date the given number of calendar months after {@code _dt}
 * (negative values subtract), using the JVM's default time zone.
 * @param _dt the starting date; may be null
 * @param _months the number of months to add
 * @return the shifted date, or null if {@code _dt} is null
 */
public static Date addMonths(Date _dt, int _months) {
	// Null-check for consistency with addDays and the TimeZone overload of
	// addMonths; previously cal.setTime(null) threw a NullPointerException.
	if (_dt == null)
		return null;
	Calendar cal = GregorianCalendar.getInstance();
	cal.setTime(_dt);
	cal.add(Calendar.MONTH, _months);
	return cal.getTime();
}
public static Date addMonths(Date _dt, int _months, TimeZone _tz) {
Calendar cal = toCalendar(_dt, _tz);
if (cal == null)
return null;
cal.add(Calendar.MONTH, _months);
return cal.getTime();
}
public static Date addMonthKeepDayOfWeek(Date _dt, int _months, TimeZone _tz) {
Calendar cal = toCalendar(_dt, _tz);
if (cal == null)
return null;
int dayOfWeek = cal.get(Calendar.DAY_OF_WEEK);
int dayOfWeekInMonth = cal.get(Calendar.DAY_OF_WEEK_IN_MONTH);
cal.add(Calendar.MONTH, _months);
cal.set(Calendar.DAY_OF_WEEK, dayOfWeek);
cal.set(Calendar.DAY_OF_WEEK_IN_MONTH, dayOfWeekInMonth);
return cal.getTime();
}
/**
 * Returns a new date the given number of calendar years after {@code _dt}
 * (negative values subtract), using the JVM's default time zone.
 * @param _dt the starting date; may be null
 * @param _years the number of years to add
 * @return the shifted date, or null if {@code _dt} is null
 */
public static Date addYears(Date _dt, int _years) {
	// Null-check for consistency with addDays and the TimeZone overload of
	// addYears; previously cal.setTime(null) threw a NullPointerException.
	if (_dt == null)
		return null;
	Calendar cal = GregorianCalendar.getInstance();
	cal.setTime(_dt);
	cal.add(Calendar.YEAR, _years);
	return cal.getTime();
}
public static Date addYears(Date _dt, int _years, TimeZone _tz) {
Calendar cal = toCalendar(_dt, _tz);
if (cal == null)
return null;
cal.add(Calendar.YEAR, _years);
return cal.getTime();
}
public static long diffInMilliseconds(Date _dt1, Date _dt2) {
return diffInMilliseconds(_dt1, _dt2, Long.MAX_VALUE);
}
public static long diffInMilliseconds(Date _dt1, Date _dt2, long _defaultIfNull) {
if ((_dt1 == null) || (_dt2 == null))
return _defaultIfNull;
return Math.abs(_dt1.getTime() - _dt2.getTime());
}
public static long diffInSeconds(Date _dt1, Date _dt2) {
return diffInSeconds(_dt1, _dt2, Long.MAX_VALUE);
}
public static long diffInSeconds(Date _dt1, Date _dt2, long _defaultIfNull) {
if ((_dt1 == null) || (_dt2 == null))
return _defaultIfNull;
return Math.abs(_dt1.getTime() - _dt2.getTime()) / 1000;
}
public static long diffInMinutes(Date _dt1, Date _dt2) {
return diffInMinutes(_dt1, _dt2, Long.MAX_VALUE);
}
public static long diffInMinutes(Date _dt1, Date _dt2, long _defaultIfNull) {
if ((_dt1 == null) || (_dt2 == null))
return _defaultIfNull;
return Math.abs(_dt1.getTime() - _dt2.getTime()) / 60000;
}
public static long diffInHours(Date _dt1, Date _dt2) {
return diffInHours(_dt1, _dt2, Long.MAX_VALUE);
}
public static long diffInHours(Date _dt1, Date _dt2, long _defaultIfNull) {
if ((_dt1 == null) || (_dt2 == null))
return _defaultIfNull;
return Math.abs(_dt1.getTime() - _dt2.getTime()) / 3600000;
}
public static boolean isAfter(Date _dt1, Date _dt2) {
return isAfter(_dt1, _dt2, false);
}
public static boolean isAfter(Date _dt1, Date _dt2, boolean _defaultIfNull) {
if ((_dt1 == null) || (_dt2 == null))
return _defaultIfNull;
return _dt1.after(_dt2);
}
public static boolean isAfterOrEqualTo(Date _dt1, Date _dt2) {
return isAfterOrEqualTo(_dt1, _dt2, false);
}
public static boolean isAfterOrEqualTo(Date _dt1, Date _dt2, boolean _defaultIfNull) {
if ((_dt1 == null) || (_dt2 == null))
return _defaultIfNull;
return _dt1.getTime() >= _dt2.getTime();
}
public static boolean isBefore(Date _dt1, Date _dt2) {
return isBefore(_dt1, _dt2, false);
}
public static boolean isBefore(Date _dt1, Date _dt2, boolean _defaultIfNull) {
if ((_dt1 == null) || (_dt2 == null))
return _defaultIfNull;
return _dt1.before(_dt2);
}
public static boolean isBeforeOrEqualTo(Date _dt1, Date _dt2) {
return isBeforeOrEqualTo(_dt1, _dt2, false);
}
public static boolean isBeforeOrEqualTo(Date _dt1, Date _dt2, boolean _defaultIfNull) {
if ((_dt1 == null) || (_dt2 == null))
return _defaultIfNull;
return _dt1.getTime() <= _dt2.getTime();
}
public static String getAge(Date _dtDOB) {
if (_dtDOB == null)
return "";
return getAge(_dtDOB, getMidnightBeforeNow());
}
public static String getAge(Date _dtDOB, Date _dtReference) {
if (_dtDOB == null)
return "";
return getAge(_dtDOB.getTime(), _dtReference.getTime());
}
/**
 * Formats the elapsed time between two epoch timestamps as a human readable age:
 * under a day as "HH:MM hours", under a month as days, under two years as months,
 * otherwise as years.
 * NOTE(review): assumes _reference >= _dob; a negative span produces odd output — confirm callers.
 * @param _dob the starting epoch timestamp in milliseconds
 * @param _reference the ending epoch timestamp in milliseconds
 * @return the formatted age string
 */
public static String getAge(long _dob, long _reference) {
	long lAge = _reference - _dob;
	if (lAge < 24 * 3600000L) // less than a day old
		// %02d zero-pads to two digits; the original "%.2d" is an illegal
		// precision for the 'd' conversion and threw
		// IllegalFormatPrecisionException at runtime.
		return String.format("%02d:%02d hours", lAge / 3600000, (lAge % 3600000) / 60000);
	if (lAge < 7 * 24 * 3600000L) // less than a week old
		return String.format("%d days", lAge / (24 * 3600000L));
	Date dtStart = new Date(_dob);
	Date dtEnd = new Date(_reference);
	int iMonths = getMonthsBetween(dtStart, dtEnd);
	if (iMonths == 0)
		// Divide by milliseconds per day; the original divided by a week's
		// worth of milliseconds while still labeling the result "days".
		return String.format("%d days", (dtEnd.getTime() - dtStart.getTime()) / (24 * 3600000L));
	int iYears = getYearsBetween(dtStart, dtEnd);
	if (iYears < 2)
		return String.format("%d months", iMonths);
	return String.format("%d years", iYears);
}
/**
 * Returns the number of whole calendar months between two dates, evaluated in GMT.
 * A partial final month (end day-of-month earlier than start day-of-month) is not counted.
 * @param _dtStart the start date (must not be null)
 * @param _dtEnd the end date (must not be null)
 * @return the number of complete months from start to end
 */
public static int getMonthsBetween(Date _dtStart, Date _dtEnd) {
	Calendar calStart = getGMTCalendar(_dtStart.getTime());
	Calendar calEnd = getGMTCalendar(_dtEnd.getTime());
	// There are 12 months per year, not 24, and the start term must be
	// subtracted as a whole: the original computed
	// end*24 + endMonth - start*24 + startMonth, adding the start month
	// instead of subtracting it.
	int diff = (calEnd.get(Calendar.YEAR) * 12 + calEnd.get(Calendar.MONTH)) - (calStart.get(Calendar.YEAR) * 12 + calStart.get(Calendar.MONTH));
	if (calStart.get(Calendar.DAY_OF_MONTH) > calEnd.get(Calendar.DAY_OF_MONTH))
		diff--;
	return diff;
}
/**
 * Returns the number of whole years between two dates, evaluated in GMT.
 * The exact anniversary counts as a completed year.
 * @param _dtStart the start date (must not be null)
 * @param _dtEnd the end date (must not be null)
 * @return the number of complete years from start to end
 */
public static int getYearsBetween(Date _dtStart, Date _dtEnd) {
	Calendar calStart = getGMTCalendar(_dtStart.getTime());
	Calendar calEnd = getGMTCalendar(_dtEnd.getTime());
	int diff = calEnd.get(Calendar.YEAR) - calStart.get(Calendar.YEAR);
	// Decrement only when the anniversary has not yet been reached in the end
	// year. The original tested isLaterInYear(calStart, calEnd); because
	// isLaterInYear uses >= on the day-of-month, that wrongly decremented on
	// the exact anniversary (e.g. 2020-05-10 -> 2021-05-10 returned 0, not 1).
	// This mirrors how getAgeInYears uses !isLaterInYear(today, dob).
	if (!isLaterInYear(calEnd, calStart))
		diff--;
	return diff;
}
/** Age in whole years as of GMT midnight today; 0 for a null DOB. */
public static int getAgeInYears(Date _dtDOB) {
	if (_dtDOB == null)
		return 0;
	return getAgeInYears(_dtDOB.getTime());
}

/** Age in whole years at the given reference date; 0 for a null DOB. */
public static int getAgeInYears(Date _dtDOB, Date _dtReference) {
	if (_dtDOB == null)
		return 0;
	return getAgeInYears(_dtDOB.getTime(), _dtReference);
}

/** Age in whole years (epoch-millis DOB) as of GMT midnight today. */
public static int getAgeInYears(long _lDOB) {
	return getAgeInYears(_lDOB, getMidnightBeforeNow());
}

/**
 * Age in whole years: the year difference, minus one when the birthday
 * has not yet occurred in the reference year. Returns 0 for missing input.
 */
public static int getAgeInYears(long _lDOB, Date _dtReference) {
	if (_lDOB == 0 || _dtReference == null)
		return 0;
	Calendar calDOB = getGMTCalendar(_lDOB);
	Calendar calToday = getGMTCalendar(_dtReference.getTime());
	int age = calToday.get(Calendar.YEAR) - calDOB.get(Calendar.YEAR);
	// isLaterInYear is true on/after the birthday (month/day), so the exact anniversary counts as a full year
	if (!isLaterInYear(calToday, calDOB))
		age--;
	return age;
}
/** Calendar for the given epoch millis in the GMT time zone. */
public static Calendar getGMTCalendar(long _lTime) {
	return toCalendar(_lTime, TimeZone.getTimeZone("GMT"));
}

/** True if _cal1's month/day falls on or after _cal2's month/day (years are ignored). */
private static boolean isLaterInYear(Calendar _cal1, Calendar _cal2) {
	if (_cal1.get(Calendar.MONTH) > _cal2.get(Calendar.MONTH))
		return true;
	return (_cal1.get(Calendar.MONTH) == _cal2.get(Calendar.MONTH)) && (_cal1.get(Calendar.DAY_OF_MONTH) >= _cal2.get(Calendar.DAY_OF_MONTH));
}
/** True when both dates fall on the same calendar day in the given time zone. */
public static boolean isSameDay(Date _d1, Date _d2, TimeZone _tz) {
	return getMidnightBefore(_d1, _tz).equals(getMidnightBefore(_d2, _tz));
}

/** True when both dates fall on the same day of the week (e.g. both Mondays) in the given zone. */
public static boolean isSameDayOfWeek(Date _d1, Date _d2, TimeZone _tz) {
	return getDayOfWeek(_d1, _tz) == getDayOfWeek(_d2, _tz);
}

/** True when both dates have an identical time of day (hour/minute/second/millisecond) in the given zone; false if either is null. */
public static boolean isSameTimeOfDay(Date _d1, Date _d2, TimeZone _tz) {
	Calendar cal1 = toCalendar(_d1, _tz);
	Calendar cal2 = toCalendar(_d2, _tz);
	if ((cal1 == null) || (cal2 == null))
		return false;
	if (cal1.get(Calendar.HOUR_OF_DAY) != cal2.get(Calendar.HOUR_OF_DAY))
		return false;
	if (cal1.get(Calendar.MINUTE) != cal2.get(Calendar.MINUTE))
		return false;
	if (cal1.get(Calendar.SECOND) != cal2.get(Calendar.SECOND))
		return false;
	return (cal1.get(Calendar.MILLISECOND) == cal2.get(Calendar.MILLISECOND));
}
/** GMT midnight at the start of the current day. */
public static Date getMidnightBeforeNow() {
	return getMidnightBeforeNow(TimeZone.getTimeZone("GMT"));
}

/** Midnight at the start of the current day in the given zone. */
public static Date getMidnightBeforeNow(TimeZone _tz) {
	return hoursAfterMidnight(new Date(), 0, _tz);
}

/** Calendar at the start of the current day in the given zone. */
public static Calendar getMidnightBeforeNowCal(TimeZone _tz) {
	return hoursAfterMidnightCal(new Date(), 0, _tz);
}

/** Midnight at the start of the given date's day. */
public static Date getMidnightBefore(Date _dt, TimeZone _tz) {
	return hoursAfterMidnight(_dt, 0, _tz);
}

public static Calendar getMidnightBeforeCal(Date _dt, TimeZone _tz) {
	return hoursAfterMidnightCal(_dt, 0, _tz);
}

/** GMT midnight at the end of the current day (start of tomorrow). */
public static Date getMidnightAfterNow() {
	return getMidnightAfterNow(TimeZone.getTimeZone("GMT"));
}

public static Date getMidnightAfterNow(TimeZone _tz) {
	return hoursAfterMidnight(new Date(), 24, _tz);
}

public static Calendar getMidnightAfterNowCal(TimeZone _tz) {
	return hoursAfterMidnightCal(new Date(), 24, _tz);
}

/** Midnight at the end of the given date's day (hour 24 rolls over to the next day via lenient Calendar arithmetic). */
public static Date getMidnightAfter(Date _dt, TimeZone _tz) {
	return hoursAfterMidnight(_dt, 24, _tz);
}

public static Calendar getMidnightAfterCal(Date _dt, TimeZone _tz) {
	return hoursAfterMidnightCal(_dt, 24, _tz);
}

/** The given day's midnight plus _hours hours. NOTE(review): NPEs on a null date because the Cal variant returns null — confirm callers never pass null. */
public static Date hoursAfterMidnight(Date _dt, int _hours, TimeZone _tz) {
	return hoursAfterMidnightCal(_dt, _hours, _tz).getTime();
}

/** Calendar at the given day's midnight plus _hours hours; null for a null date. */
public static Calendar hoursAfterMidnightCal(Date _dt, int _hours, TimeZone _tz) {
	Calendar cal = toCalendar(_dt, _tz);
	if (cal == null)
		return null;
	cal.set(Calendar.HOUR_OF_DAY, _hours);
	cal.set(Calendar.MINUTE, 0);
	cal.set(Calendar.SECOND, 0);
	cal.set(Calendar.MILLISECOND, 0);
	return cal;
}
/**
 * True when _dt lies within [_dtFrom, _dtTo): the lower bound is inclusive,
 * the upper bound exclusive, and a null bound is treated as open-ended.
 * A null _dt is never between anything.
 */
public static boolean isBetween(Date _dt, Date _dtFrom, Date _dtTo) {
	if (_dt == null)
		return false;
	boolean onOrAfterStart = (_dtFrom == null) || !_dtFrom.after(_dt);
	boolean beforeEnd = (_dtTo == null) || _dtTo.after(_dt);
	return onOrAfterStart && beforeEnd;
}
/**
 * Combines the calendar day of _date with the time-of-day of _time,
 * both interpreted in the given zone. Returns null if either input is null.
 */
public static Date setTimeOfDay(Date _date, Date _time, TimeZone _tz) {
	Calendar date = toCalendar(_date, _tz);
	Calendar time = toCalendar(_time, _tz);
	if ((date == null) || (time == null))
		return null;
	date.set(Calendar.HOUR_OF_DAY, time.get(Calendar.HOUR_OF_DAY));
	date.set(Calendar.MINUTE, time.get(Calendar.MINUTE));
	date.set(Calendar.SECOND, time.get(Calendar.SECOND));
	date.set(Calendar.MILLISECOND, time.get(Calendar.MILLISECOND));
	return date.getTime();
}
/** Calendar for the given epoch millis in the given zone. */
public static Calendar toCalendar(long _ts, TimeZone _tz) {
	return toCalendar(new Date(_ts), _tz);
}

/** Calendar for the given date in the given zone; null in, null out. */
public static Calendar toCalendar(Date _date, TimeZone _tz) {
	if (_date == null)
		return null;
	Calendar cal = new GregorianCalendar(_tz);
	cal.setTime(_date);
	return cal;
}

/**
 * New DateFormat for the given pattern and zone.
 * A fresh instance is created per call because SimpleDateFormat is not thread-safe.
 */
public static DateFormat dateFormat(String _format, TimeZone _tz) {
	SimpleDateFormat format = new SimpleDateFormat(_format);
	format.setTimeZone(_tz);
	return format;
}
/** Formats the date with the given pattern in UTC; null date yields null. */
public static String format(String _format, Date _dt) {
	return format(_format, TimeZone.getTimeZone("UTC"), _dt);
}

/** Formats the date with the given pattern in the given zone; null date yields null. */
public static String format(String _format, TimeZone _tz, Date _dt) {
	if (_dt == null)
		return null;
	return dateFormat(_format, _tz).format(_dt);
}

/** Parses with the given pattern in UTC; null on empty input or parse failure. */
public static Date parse(String _format, String _date) {
	return parse(_format, TimeZone.getTimeZone("UTC"), _date);
}

/** Parses with the given pattern in the given zone; null on empty input or parse failure (never throws). */
public static Date parse(String _format, TimeZone _tz, String _date) {
	if (NullUtils.isEmpty(_date))
		return null;
	try {
		return dateFormat(_format, _tz).parse(_date);
	}
	catch (Exception _e) {
		return null;
	}
}
/** Date at midnight for the given 1-based month, day, and year in the given zone. */
public static Date date(int _month, int _day, int _year, TimeZone _tz) {
	return date(_month, _day, _year, 0, 0, 0, 0, _tz);
}

/** Date built from explicit fields in the given zone. _month is 1-based (Calendar months are 0-based). */
public static Date date(int _month, int _day, int _year, int _hour, int _minutes, int _seconds, int _ms, TimeZone _tz) {
	Calendar cal = GregorianCalendar.getInstance(_tz);
	cal.set(Calendar.YEAR, _year);
	cal.set(Calendar.MONTH, _month - 1);
	cal.set(Calendar.DAY_OF_MONTH, _day);
	cal.set(Calendar.HOUR_OF_DAY, _hour);
	cal.set(Calendar.MINUTE, _minutes);
	cal.set(Calendar.SECOND, _seconds);
	cal.set(Calendar.MILLISECOND, _ms);
	return cal.getTime();
}
/** Calendar.DAY_OF_WEEK constant (SUNDAY..SATURDAY) for the date in the given zone, or 0 for null. */
public static int getDayOfWeek(Date _dt, TimeZone _tz) {
	Calendar cal = toCalendar(_dt, _tz);
	return cal == null ? 0 : cal.get(Calendar.DAY_OF_WEEK);
}

/** Moves the date to the given day within its current week; out-of-range day values leave the date unchanged. Null in, null out. */
public static Date setDayOfWeek(Date _dt, TimeZone _tz, int _dayOfWeek) {
	Calendar cal = toCalendar(_dt, _tz);
	if (cal == null)
		return null;
	if ((_dayOfWeek >= Calendar.SUNDAY) && (_dayOfWeek <= Calendar.SATURDAY))
		cal.set(Calendar.DAY_OF_WEEK, _dayOfWeek);
	return cal.getTime();
}

/** Midnight at the start of the given weekday within the date's week. */
public static Date getMidnightBeforeDayOfWeek(Date _dt, TimeZone _tz, int _dayOfWeek) {
	return getMidnightBefore(setDayOfWeek(_dt, _tz, _dayOfWeek), _tz);
}

/** Midnight at the end of the given weekday within the date's week. */
public static Date getMidnightAfterDayOfWeek(Date _dt, TimeZone _tz, int _dayOfWeek) {
	return getMidnightAfter(setDayOfWeek(_dt, _tz, _dayOfWeek), _tz);
}
/**
 * True when the given date's day contains a DST transition in the given zone.
 * On a normal day, 08:00 is exactly 8 hours (28800000 ms) after midnight;
 * a spring-forward/fall-back day shifts that offset by the transition amount.
 */
public static boolean isDstTransitionDay(Date _dt, TimeZone _tz) {
	Date midnight = getMidnightBefore(_dt, _tz);
	Calendar cal = toCalendar(midnight, _tz);
	if (cal == null)
		return false;
	cal.set(Calendar.HOUR_OF_DAY, 8);
	return (cal.getTimeInMillis() - midnight.getTime() != 28800000);
}
/**
 * Moves the date to the named day ("Sunday".."Saturday", case-sensitive) within its current week.
 * Unrecognized names leave the date unchanged; a null date yields null.
 * Delegates to setDayOfWeek(Date, TimeZone, int) instead of duplicating the Calendar logic
 * (an unmatched name maps to 0, which the int overload ignores — same behavior as before).
 */
public static Date setDayOfWeek(Date _dt, TimeZone _tz, String _dayOfWeek) {
	int dayOfWeekInt = 0;
	// makeNotNull: the original switched on the raw argument and NPE'd on a null name
	switch (NullUtils.makeNotNull(_dayOfWeek)) {
		case "Sunday":
			dayOfWeekInt = Calendar.SUNDAY;
			break;
		case "Monday":
			dayOfWeekInt = Calendar.MONDAY;
			break;
		case "Tuesday":
			dayOfWeekInt = Calendar.TUESDAY;
			break;
		case "Wednesday":
			dayOfWeekInt = Calendar.WEDNESDAY;
			break;
		case "Thursday":
			dayOfWeekInt = Calendar.THURSDAY;
			break;
		case "Friday":
			dayOfWeekInt = Calendar.FRIDAY;
			break;
		case "Saturday":
			dayOfWeekInt = Calendar.SATURDAY;
			break;
	}
	return setDayOfWeek(_dt, _tz, dayOfWeekInt);
}
/** Start of the current minute in the given zone. */
public static Date getStartOfMinute(TimeZone _tz) {
	return getStartOfMinute(new Date(), _tz);
}

/** Truncates the date to the start of its minute; null in, null out. */
public static Date getStartOfMinute(Date _dt, TimeZone _tz) {
	Calendar cal = getStartOfMinuteCal(_dt, _tz);
	return (cal == null) ? null : cal.getTime();
}

/** Calendar truncated to the start of the minute; null-safe like hoursAfterMidnightCal (the original NPE'd on null). */
public static Calendar getStartOfMinuteCal(Date _dt, TimeZone _tz) {
	Calendar cal = toCalendar(_dt, _tz);
	if (cal == null)
		return null;
	cal.set(Calendar.SECOND, 0);
	cal.set(Calendar.MILLISECOND, 0);
	return cal;
}

/** Start of the next minute in the given zone. */
public static Date getEndOfMinute(TimeZone _tz) {
	return getEndOfMinute(new Date(), _tz);
}

/** Start of the minute after the given date; null in, null out. */
public static Date getEndOfMinute(Date _dt, TimeZone _tz) {
	Calendar cal = getEndOfMinuteCal(_dt, _tz);
	return (cal == null) ? null : cal.getTime();
}

public static Calendar getEndOfMinuteCal(Date _dt, TimeZone _tz) {
	Calendar cal = getStartOfMinuteCal(_dt, _tz);
	if (cal != null)
		cal.add(Calendar.MINUTE, 1);
	return cal;
}

/** Start of the current hour in the given zone. */
public static Date getStartOfHour(TimeZone _tz) {
	return getStartOfHour(new Date(), _tz);
}

public static Date getStartOfHour(Date _dt, TimeZone _tz) {
	Calendar cal = getStartOfHourCal(_dt, _tz);
	return (cal == null) ? null : cal.getTime();
}

/** Calendar truncated to the start of the hour; null in, null out. */
public static Calendar getStartOfHourCal(Date _dt, TimeZone _tz) {
	Calendar cal = toCalendar(_dt, _tz);
	if (cal == null)
		return null;
	cal.set(Calendar.MINUTE, 0);
	cal.set(Calendar.SECOND, 0);
	cal.set(Calendar.MILLISECOND, 0);
	return cal;
}

/** Start of the next hour in the given zone. */
public static Date getEndOfHour(TimeZone _tz) {
	return getEndOfHour(new Date(), _tz);
}

public static Date getEndOfHour(Date _dt, TimeZone _tz) {
	Calendar cal = getEndOfHourCal(_dt, _tz);
	return (cal == null) ? null : cal.getTime();
}

public static Calendar getEndOfHourCal(Date _dt, TimeZone _tz) {
	Calendar cal = getStartOfHourCal(_dt, _tz);
	if (cal != null)
		cal.add(Calendar.HOUR_OF_DAY, 1);
	return cal;
}

/** Midnight at the start of the current week (weeks start on Sunday). */
public static Date getStartOfWeek(TimeZone _tz) {
	return getStartOfWeek(new Date(), _tz);
}

public static Date getStartOfWeek(Date _dt, TimeZone _tz) {
	Calendar cal = getStartOfWeekCal(_dt, _tz);
	return (cal == null) ? null : cal.getTime();
}

/** Calendar at Sunday midnight of the date's week; null in, null out. */
public static Calendar getStartOfWeekCal(Date _dt, TimeZone _tz) {
	Calendar cal = toCalendar(_dt, _tz);
	if (cal == null)
		return null;
	cal.set(Calendar.HOUR_OF_DAY, 0);
	cal.set(Calendar.MINUTE, 0);
	cal.set(Calendar.SECOND, 0);
	cal.set(Calendar.MILLISECOND, 0);
	cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
	return cal;
}

/** Midnight at the start of next week. */
public static Date getEndOfWeek(TimeZone _tz) {
	return getEndOfWeek(new Date(), _tz);
}

public static Date getEndOfWeek(Date _dt, TimeZone _tz) {
	Calendar cal = getEndOfWeekCal(_dt, _tz);
	return (cal == null) ? null : cal.getTime();
}

public static Calendar getEndOfWeekCal(Date _dt, TimeZone _tz) {
	Calendar cal = getStartOfWeekCal(_dt, _tz);
	if (cal != null)
		cal.add(Calendar.DAY_OF_YEAR, 7);
	return cal;
}

/** Midnight on the first day of the current month. */
public static Date getStartOfMonth(TimeZone _tz) {
	return getStartOfMonth(new Date(), _tz);
}

public static Date getStartOfMonth(Date _dt, TimeZone _tz) {
	Calendar cal = getStartOfMonthCal(_dt, _tz);
	return (cal == null) ? null : cal.getTime();
}

/** Calendar at midnight on the first day of the date's month; null in, null out. */
public static Calendar getStartOfMonthCal(Date _dt, TimeZone _tz) {
	Calendar cal = toCalendar(_dt, _tz);
	if (cal == null)
		return null;
	cal.set(Calendar.HOUR_OF_DAY, 0);
	cal.set(Calendar.MINUTE, 0);
	cal.set(Calendar.SECOND, 0);
	cal.set(Calendar.MILLISECOND, 0);
	cal.set(Calendar.DAY_OF_MONTH, 1);
	return cal;
}

/** Midnight on the first day of next month. */
public static Date getEndOfMonth(TimeZone _tz) {
	return getEndOfMonth(new Date(), _tz);
}

public static Date getEndOfMonth(Date _dt, TimeZone _tz) {
	Calendar cal = getEndOfMonthCal(_dt, _tz);
	return (cal == null) ? null : cal.getTime();
}

public static Calendar getEndOfMonthCal(Date _dt, TimeZone _tz) {
	Calendar cal = getStartOfMonthCal(_dt, _tz);
	if (cal != null)
		cal.add(Calendar.MONTH, 1);
	return cal;
}

/** Midnight on January 1st of the current year. */
public static Date getStartOfYear(TimeZone _tz) {
	return getStartOfYear(new Date(), _tz);
}

public static Date getStartOfYear(Date _dt, TimeZone _tz) {
	Calendar cal = getStartOfYearCal(_dt, _tz);
	return (cal == null) ? null : cal.getTime();
}

/** Calendar at midnight on the first day of the date's year; null in, null out. */
public static Calendar getStartOfYearCal(Date _dt, TimeZone _tz) {
	Calendar cal = toCalendar(_dt, _tz);
	if (cal == null)
		return null;
	cal.set(Calendar.HOUR_OF_DAY, 0);
	cal.set(Calendar.MINUTE, 0);
	cal.set(Calendar.SECOND, 0);
	cal.set(Calendar.MILLISECOND, 0);
	cal.set(Calendar.DAY_OF_YEAR, 1);
	return cal;
}

/** Midnight on January 1st of next year. */
public static Date getEndOfYear(TimeZone _tz) {
	return getEndOfYear(new Date(), _tz);
}

public static Date getEndOfYear(Date _dt, TimeZone _tz) {
	Calendar cal = getEndOfYearCal(_dt, _tz);
	return (cal == null) ? null : cal.getTime();
}

public static Calendar getEndOfYearCal(Date _dt, TimeZone _tz) {
	Calendar cal = getStartOfYearCal(_dt, _tz);
	if (cal != null)
		cal.add(Calendar.YEAR, 1);
	return cal;
}
}

View File

@@ -0,0 +1,147 @@
package com.lanternsoftware.util;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.ServiceLoader;
import com.lanternsoftware.util.tracing.TraceLog;
import com.lanternsoftware.util.tracing.TraceTags;
import com.lanternsoftware.util.tracing.TracerConfig;
import com.lanternsoftware.util.tracing.ITracer;
import com.lanternsoftware.util.tracing.TraceContext;
import com.lanternsoftware.util.tracing.TraceDuration;
import org.slf4j.Logger;
/**
 * Lightweight stopwatch that logs elapsed time locally (SLF4J or stdout)
 * and optionally reports spans to a pluggable ITracer discovered via ServiceLoader.
 */
public class DebugTimer {
	private final Logger LOG;                // optional per-instance logger; falls back to System.out
	private final String name;               // label printed with the timing
	private final TraceContext context;      // parent context, restored on the thread after print()
	private Date start;
	private TraceDuration duration;          // created lazily on first use
	private TraceTags tags;
	private List<TraceLog> logs;
	private final boolean suppressLocalLogs;
	private static ITracer tracer = null;    // discovered via ServiceLoader in setTracerConfig
	private static TracerConfig config;
	private static final ThreadLocal<TraceContext> traceContexts = new ThreadLocal<>();

	public DebugTimer(String _name) {
		this(null, _name, null, false);
	}

	public DebugTimer(TraceContext _parent, String _name) {
		this(_parent, _name, null, false);
	}

	public DebugTimer(String _name, Logger _log) {
		this(null, _name, _log, false);
	}

	public DebugTimer(String _context, String _name) {
		this(TraceContext.deserialize(_context), _name, null, false);
	}

	public DebugTimer(String _context, String _name, Logger _log) {
		this(TraceContext.deserialize(_context), _name, _log, false);
	}

	public DebugTimer(TraceContext _parent, String _name, Logger _log) {
		this(_parent, _name, _log, false);
	}

	/** Starts timing immediately. With no explicit parent and thread contexts enabled, adopts/publishes the thread-local context. */
	public DebugTimer(TraceContext _parent, String _name, Logger _log, boolean _suppressLocalLogs) {
		TraceContext parent = _parent;
		name = _name;
		LOG = _log;
		suppressLocalLogs = _suppressLocalLogs;
		start = new Date();
		if ((parent == null) && (config != null) && config.isUseThreadContext()) {
			parent = traceContexts.get();
			traceContexts.set(getContext());
		}
		context = parent;
	}

	/** Installs the tracer configuration, discovering an ITracer implementation on first call. */
	public static void setTracerConfig(TracerConfig _config) {
		config = _config;
		if (tracer == null) {
			Iterator<ITracer> iter = ServiceLoader.load(ITracer.class).iterator();
			if (iter.hasNext()) {
				tracer = iter.next();
			}
		}
		// Guard: the original called tracer.config() unconditionally and threw an NPE
		// when no ITracer implementation was registered on the classpath.
		if (tracer != null)
			tracer.config(config);
	}

	private ITracer getTracer() {
		if (config == null)
			return null;
		return tracer;
	}

	/** Attaches a key/value tag to this timer's trace. */
	public void tag(String _name, String _value) {
		if (tags == null)
			tags = TraceTags.tag(_name, _value);
		else
			tags.put(_name, _value);
	}

	/** Records a timestamped event within this timer's duration. */
	public void log(String _event) {
		if (logs == null)
			logs = new ArrayList<>();
		logs.add(new TraceLog(traceDuration().currentTimeOffset(), _event));
	}

	private TraceDuration traceDuration() {
		if (duration != null)
			return duration;
		ITracer t = getTracer();
		duration = (t == null) ? new TraceDuration(start) : t.createDuration(context, name, start);
		return duration;
	}

	/** (Re)starts timing; resets the start time if no duration has been created yet. */
	public void start() {
		if (duration == null)
			start = new Date();
		traceDuration().start();
	}

	/** Stops timing, then prints/traces the result. */
	public TraceContext stop() {
		traceDuration().stop();
		return print();
	}

	public void stopDoNotPrint() {
		traceDuration().stop();
	}

	/** Elapsed milliseconds. */
	public long duration() {
		return traceDuration().duration();
	}

	public TraceContext getContext() {
		return traceDuration().getContext();
	}

	/** Emits the timing to the tracer and/or a local log, then restores the parent context on this thread. */
	public TraceContext print() {
		StringBuilder b = new StringBuilder(name);
		b.append(": ");
		b.append(traceDuration().duration());
		b.append("ms");
		TraceContext newContext = traceDuration().getContext();
		ITracer t = getTracer();
		if (t != null)
			t.trace(name, duration, tags, logs);
		if (!suppressLocalLogs && ((config == null) || !config.isSuppressLocalLog())) {
			if (LOG != null)
				LOG.debug(b.toString());
			else
				System.out.println(b.toString());
		}
		traceContexts.set(context);
		return newContext;
	}
}

View File

@@ -0,0 +1,7 @@
package com.lanternsoftware.util;
import java.util.List;
/** One-to-many mapping: transforms a single input object into a list of output values. */
public interface IAggregator<T, V> {
	List<V> aggregate(T _t);
}

View File

@@ -0,0 +1,5 @@
package com.lanternsoftware.util;
/** Callback that mutates the given object in place. */
public interface IEditor<T> {
	void edit(T _t);
}

View File

@@ -0,0 +1,5 @@
package com.lanternsoftware.util;
/** Custom equality comparator used by NullUtils.isEqual/isNotEqual overloads. */
public interface IEquals<T> {
	boolean equals(T _t1, T _t2);
}

View File

@@ -0,0 +1,5 @@
package com.lanternsoftware.util;
/** Predicate deciding whether an element should be filtered OUT (true = excluded). */
public interface IFilter<T> {
	boolean isFiltered(T _t);
}

View File

@@ -0,0 +1,5 @@
package com.lanternsoftware.util;
/** Predicate deciding whether an element qualifies for inclusion (true = included). */
public interface IQualifier<T> {
	boolean qualifies(T _t);
}

View File

@@ -0,0 +1,5 @@
package com.lanternsoftware.util;
/** Provider of a value; analogous to java.util.function.Supplier. */
public interface ISupplier<T> {
	T get();
}

View File

@@ -0,0 +1,5 @@
package com.lanternsoftware.util;
/** One-to-one mapping from T to V; analogous to java.util.function.Function. */
public interface ITransformer<T, V> {
	V transform(T _t);
}

View File

@@ -0,0 +1,6 @@
package com.lanternsoftware.util;
/** Filesystem roots used by local build/deployment tooling. */
public abstract class LanternFiles {
	// NOTE(review): hard-coded developer-machine Windows paths; consider moving to external configuration.
	public static final String SOURCE_PATH = "C:\\lantern\\wc\\opensource\\LanternPowerMonitor\\";
	public static final String OPS_PATH = "D:\\zwave\\";
}

View File

@@ -0,0 +1,23 @@
package com.lanternsoftware.util;
public class MapUtils {
	private static final double RADIUS_EARTH = 6371000;

	/**
	 * Great-circle distance between two latitude/longitude points using the haversine formula.
	 * @return Distance between the two points in meters
	 */
	public static double distance(double _latitude1, double _longitude1, double _latitude2, double _longitude2)
	{
		double dLat = Math.toRadians(_latitude2 - _latitude1);
		double dLon = Math.toRadians(_longitude2 - _longitude1);
		double sinHalfLat = Math.sin(dLat / 2);
		double sinHalfLon = Math.sin(dLon / 2);
		double a = sinHalfLat * sinHalfLat + Math.cos(Math.toRadians(_latitude1)) * Math.cos(Math.toRadians(_latitude2)) * sinHalfLon * sinHalfLon;
		return RADIUS_EARTH * 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
	}

	/**
	 * @return Time in ms to travel from one point to another as the crow flies at the given speed
	 */
	public static long travelTime(double _latitude1, double _longitude1, double _latitude2, double _longitude2, double _speedMetersPerSecond) {
		double meters = distance(_latitude1, _longitude1, _latitude2, _longitude2);
		return (long)(1000 * meters / _speedMetersPerSecond);
	}
}

View File

@@ -0,0 +1,84 @@
package com.lanternsoftware.util;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
public class MemoryStats {
public static long size(Object _o) {
return size(_o, new HashSet<Integer>());
}
private static long size(Object _o, Set<Integer> _counted) {
int hash = System.identityHashCode(_o);
if (_counted.contains(hash))
return 0;
_counted.add(hash);
long size = 0;
for (Field f : allFields(_o.getClass())) {
if (Modifier.isStatic(f.getModifiers()) || Modifier.isTransient(f.getModifiers()))
continue;
f.setAccessible(true);
try {
Object child = f.get(_o);
if (child != null) {
if (f.getType().equals(String.class))
size += ((String)child).length();
else if (f.getType().equals(Date.class))
size += 8;
else if (f.getType().equals(Double.class) || f.getType().equals(Double.TYPE))
size += 8;
else if (f.getType().equals(Float.class) || f.getType().equals(Float.TYPE))
size += 4;
else if (f.getType().equals(Long.class) || f.getType().equals(Long.TYPE))
size += 8;
else if (f.getType().equals(Integer.class) || f.getType().equals(Integer.TYPE))
size += 4;
else if (f.getType().equals(Short.class) || f.getType().equals(Short.TYPE))
size += 2;
else if (f.getType().equals(Byte.class) || f.getType().equals(Byte.TYPE))
size += 1;
else if (f.getType().equals(Character.class) || f.getType().equals(Character.TYPE))
size += 1;
else if (f.getType().equals(Boolean.class) || f.getType().equals(Boolean.TYPE))
size += 1;
else if (f.getType().equals(byte[].class))
size += ((byte[])child).length;
else if (f.getType().equals(char[].class))
size += ((char[])child).length;
else if (Collection.class.isAssignableFrom(f.getType())) {
for (Object childElement : ((Collection)child)) {
size += size(childElement, _counted);
}
}
else if (Map.class.isAssignableFrom(f.getType())) {
Set<Entry<?, ?>> entries = ((Map)child).entrySet();
for (Entry<?, ?> childElement : entries) {
size += size(childElement.getKey(), _counted);
size += size(childElement.getValue(), _counted);
}
}
else
size += size(child, _counted);
}
} catch (IllegalAccessException _e) {
}
}
return size;
}
private static List<Field> allFields(Class _c) {
if (_c == null)
return Collections.emptyList();
List<Field> fields = CollectionUtils.asArrayList(_c.getDeclaredFields());
fields.addAll(allFields(_c.getSuperclass()));
return fields;
}
}

View File

@@ -0,0 +1,397 @@
package com.lanternsoftware.util;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.commons.codec.binary.Hex;
/** Null-safe utilities for comparisons, string handling, parsing, and encoding. */
public class NullUtils {
	public static boolean isNotEqual(Object a, Object b) {
		return !isEqual(a, b);
	}

	/** Null-safe equality: two nulls are equal; null never equals non-null. */
	public static boolean isEqual(Object a, Object b) {
		if (a != null)
			return (b != null) && a.equals(b);
		return (b == null);
	}

	public static <T> boolean isNotEqual(T a, T b, IEquals<T> _equals) {
		return !isEqual(a, b, _equals);
	}

	/** Null-safe equality using a custom comparator. */
	public static <T> boolean isEqual(T a, T b, IEquals<T> _equals) {
		if (a != null)
			return (b != null) && _equals.equals(a, b);
		return (b == null);
	}

	/** Null-safe case-insensitive string equality. */
	public static boolean equalsIgnoreCase(String a, String b) {
		if (a != null)
			return a.equalsIgnoreCase(b);
		return (b == null);
	}

	/** Length of the string, or 0 for null. */
	public static int length(String _val) {
		if (_val == null)
			return 0;
		return _val.length();
	}

	public static boolean isEmpty(String _sVal) {
		return (_sVal == null) || (_sVal.length() == 0);
	}

	/** True if any value is null/empty, or the varargs array itself is null. */
	public static boolean isAnyEmpty(String... _vals) {
		if (_vals == null)
			return true;
		for (String val : _vals) {
			if (isEmpty(val))
				return true;
		}
		return false;
	}

	public static boolean isNotEmpty(String _sVal) {
		return !isEmpty(_sVal);
	}

	public static boolean isAnyNotEmpty(String... _vals) {
		if (_vals == null)
			return false;
		for (String val : _vals) {
			if (isNotEmpty(val))
				return true;
		}
		return false;
	}

	public static boolean isAnyNull(Object... _o) {
		if ((_o == null) || (_o.length == 0))
			return false;
		for (Object o : _o) {
			if (o == null)
				return true;
		}
		return false;
	}

	/** True when _o equals one of _values; always false for a null _o. */
	public static boolean isOneOf(Object _o, Object... _values) {
		if ((_o == null) || (_values == null) || (_values.length == 0))
			return false;
		for (Object o : _values) {
			if (_o.equals(o))
				return true;
		}
		return false;
	}

	public static String trim(String _val) {
		if (_val == null)
			return null;
		return _val.trim();
	}

	/** Decodes UTF-8 bytes to a String; null in, null out. */
	public static String toString(byte[] _arrBytes) {
		if (_arrBytes == null)
			return null;
		// StandardCharsets avoids the impossible UnsupportedEncodingException of the name-based API
		return new String(_arrBytes, StandardCharsets.UTF_8);
	}

	/** Encodes a String as UTF-8 bytes; null in, null out. */
	public static byte[] toByteArray(String _value) {
		if (_value == null)
			return null;
		return _value.getBytes(StandardCharsets.UTF_8);
	}

	/** Parses an int, returning 0 for null/invalid input. */
	public static int toInteger(String _value) {
		try {
			return Integer.parseInt(makeNotNull(_value)); // parseInt avoids boxing vs. valueOf
		}
		catch (NumberFormatException _e) {
			return 0;
		}
	}

	/** Parses a long, returning 0 for null/invalid input. */
	public static long toLong(String _value) {
		try {
			return Long.parseLong(makeNotNull(_value));
		}
		catch (NumberFormatException _e) {
			return 0;
		}
	}

	/** Parses a double, returning 0.0 for null/invalid input. */
	public static double toDouble(String _value) {
		try {
			return Double.parseDouble(makeNotNull(_value));
		}
		catch (NumberFormatException _e) {
			return 0.0;
		}
	}

	/** Parses a float, returning 0 for null/invalid input. */
	public static float toFloat(String _value) {
		try {
			return Float.parseFloat(makeNotNull(_value));
		}
		catch (NumberFormatException _e) {
			return 0f;
		}
	}

	/** URL-encodes with UTF-8; returns the input unchanged if encoding fails. */
	public static String urlEncode(String _url) {
		try {
			return URLEncoder.encode(makeNotNull(_url), StandardCharsets.UTF_8.name());
		}
		catch (UnsupportedEncodingException e) {
			return _url;
		}
	}

	/** URL-decodes with UTF-8; returns the input unchanged if decoding fails. */
	public static String urlDecode(String _url) {
		try {
			return URLDecoder.decode(makeNotNull(_url), StandardCharsets.UTF_8.name());
		}
		catch (UnsupportedEncodingException e) {
			return _url;
		}
	}

	/** The value, or "" for null. */
	public static String makeNotNull(String _value) {
		if (_value != null)
			return _value;
		return "";
	}

	/** Substring after the LAST occurrence of _search; "" when absent or nothing follows it. */
	public static String after(String _value, String _search) {
		if (_value == null)
			return "";
		int iPos = _value.lastIndexOf(_search);
		if (iPos < 0)
			return "";
		return iPos < _value.length() - _search.length() ? _value.substring(iPos + _search.length()) : "";
	}

	/** Parses an enum constant by exact name; null when invalid. */
	public static <T extends Enum<T>> T toEnum(Class<T> _enumType, String _sValue) {
		return toEnum(_enumType, _sValue, null);
	}

	/** Parses an enum constant by exact name; _default when invalid or null. */
	public static <T extends Enum<T>> T toEnum(Class<T> _enumType, String _sValue, T _default) {
		try {
			return Enum.valueOf(_enumType, _sValue);
		}
		catch (Throwable t) {
			return _default;
		}
	}

	/** Converts each name to an enum constant, silently skipping invalid names. */
	public static <T extends Enum<T>> List<T> toEnums(Class<T> _enumType, Collection<String> _values) {
		List<T> listEnums = new ArrayList<T>();
		for (String value : CollectionUtils.makeNotNull(_values)) {
			T e = toEnum(_enumType, value, null);
			if (e != null)
				listEnums.add(e);
		}
		return listEnums;
	}

	/** Null-safe compare, nulls ordered first. */
	public static <T extends Comparable<T>> int compare(T a, T b) {
		return compare(a, b, true);
	}

	/** Null-safe compare; _bNullsFirst controls where nulls sort. */
	public static <T extends Comparable<T>> int compare(T a, T b, boolean _bNullsFirst) {
		if (a != null) {
			if (b != null)
				return a.compareTo(b);
			return _bNullsFirst ? 1 : -1;
		}
		if (b != null)
			return _bNullsFirst ? -1 : 1;
		return 0;
	}

	/** Minimum of the given values; Integer.MAX_VALUE when none are supplied. */
	public static int min(int... values) {
		int iMin = Integer.MAX_VALUE;
		for (int value : values) {
			if (value < iMin)
				iMin = value;
		}
		return iMin;
	}

	/** Splits on the regex and drops null/empty tokens; never returns null. */
	public static String[] cleanSplit(String _sValue, String _sRegex) {
		if (_sValue == null)
			return new String[0];
		return removeEmpties(_sValue.split(_sRegex));
	}

	/** The array with null/empty entries removed (the same array when already clean). */
	public static String[] removeEmpties(String[] _arr) {
		if (_arr == null)
			return new String[0];
		int valid = 0;
		for (String s : _arr) {
			if (isNotEmpty(s))
				valid++;
		}
		if (valid == _arr.length)
			return _arr;
		String[] ret = new String[valid];
		valid = 0;
		for (String s : _arr) {
			if (isNotEmpty(s))
				ret[valid++] = s;
		}
		return ret;
	}

	public static String wrap(String _input, int _lineLength) {
		return wrap(_input, _lineLength, false);
	}

	/** Inserts "\n" (optionally "\r\n") every _lineLength characters; the final short segment gets no break. */
	public static String wrap(String _input, int _lineLength, boolean carriageReturn) {
		if (_input == null)
			return null;
		if (_lineLength < 1) // guard: the original looped forever for non-positive lengths
			return _input;
		StringBuilder output = new StringBuilder();
		int i = 0;
		while (i < _input.length()) {
			if ((i + _lineLength) > _input.length())
				output.append(_input.substring(i));
			else {
				output.append(_input, i, i + _lineLength);
				if (carriageReturn)
					output.append("\r");
				output.append("\n");
			}
			i += _lineLength;
		}
		return output.toString();
	}

	/** Loads a class by name as a subtype of _superClass; null when the class is not found. */
	public static <T> Class<? extends T> getClass(String _className, Class<T> _superClass) {
		try {
			return Class.forName(_className).asSubclass(_superClass);
		}
		catch (ClassNotFoundException _e) {
			return null;
		}
	}

	/** Appends _suffix unless the value already ends with it; a null value yields the suffix alone. */
	public static String terminateWith(String _value, String _suffix) {
		if (_value == null)
			return _suffix;
		if (_value.endsWith(_suffix))
			return _value;
		return _value + _suffix;
	}

	public static String toUpperCase(String _value) {
		if (_value == null)
			return null;
		return _value.toUpperCase();
	}

	public static String toLowerCase(String _value) {
		if (_value == null)
			return null;
		return _value.toLowerCase();
	}

	/**
	 * Parses a raw query string into a multimap of parameter names to values.
	 * Values are NOT URL-decoded, and parameters without '=' are dropped.
	 */
	public static Map<String, List<String>> parseQueryParams(String _queryString) {
		Map<String, List<String>> queryParameters = new HashMap<>();
		if (isEmpty(_queryString)) {
			return queryParameters;
		}
		String[] parameters = _queryString.split("&");
		for (String parameter : parameters) {
			String[] keyValuePair = parameter.split("=");
			if (keyValuePair.length > 1)
				CollectionUtils.addToMultiMap(keyValuePair[0], keyValuePair[1], queryParameters);
		}
		return queryParameters;
	}

	/** Serializes a parameter multimap back into "k=v&k=v" form, skipping empty values. */
	public static String toQueryString(Map<String, List<String>> _queryParameters) {
		StringBuilder queryString = null;
		for (Entry<String, List<String>> entry : CollectionUtils.makeNotNull(_queryParameters).entrySet()) {
			for (String param : CollectionUtils.makeNotNull(entry.getValue())) {
				if (isEmpty(param))
					continue;
				if (queryString == null)
					queryString = new StringBuilder();
				else
					queryString.append("&");
				queryString.append(entry.getKey());
				queryString.append("=");
				queryString.append(param);
			}
		}
		return queryString == null ? "" : queryString.toString();
	}

	/** Hex encoding of the string's UTF-8 bytes. */
	public static String toHex(String _sValue)
	{
		return toHex(toByteArray(_sValue));
	}

	/** Hex dump with a single space between bytes (the original's "%02X " plus delimiter produced double spaces). */
	public static String toHexBytes(byte[] _btData)
	{
		List<String> bytes = new ArrayList<>(_btData.length);
		for (byte b : _btData) {
			bytes.add(String.format("%02X", b));
		}
		return CollectionUtils.delimit(bytes, " ");
	}

	/** Lowercase hex encoding of the bytes; "" on any failure (including null input). */
	public static String toHex(byte[] _btData)
	{
		try
		{
			return new String(Hex.encodeHex(_btData));
		}
		catch (Exception e)
		{
			return "";
		}
	}

	/** Decodes a hex string to bytes; null on invalid input. */
	public static byte[] fromHex(String _sValue)
	{
		try
		{
			return Hex.decodeHex(makeNotNull(_sValue).toCharArray());
		}
		catch (Exception e)
		{
			return null;
		}
	}

	/** Clamps the value into [_min, _max]. */
	public static int bound(int _value, int _min, int _max) {
		if (_value < _min)
			return _min;
		if (_value > _max)
			return _max;
		return _value;
	}
}

View File

@@ -0,0 +1,147 @@
package com.lanternsoftware.util;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Helpers for loading classpath resources and reading/writing files; errors are logged, not thrown. */
public abstract class ResourceLoader {
	protected static final Logger LOG = LoggerFactory.getLogger(ResourceLoader.class);

	/** Loads a classpath resource as a UTF-8 string; "" when missing or unreadable. */
	public static String getStringResource(Class clazz, String _sResourceFileName) {
		String sReply = null;
		try (InputStream stream = clazz.getResourceAsStream(_sResourceFileName)) {
			if (stream != null)
				sReply = IOUtils.toString(stream, "UTF-8"); // explicit charset; the original depended on the platform default
		}
		catch (Exception e) {
			LOG.error("Failed to load resource: " + _sResourceFileName, e);
		}
		return sReply == null ? "" : sReply;
	}

	/** Loads a classpath resource as raw bytes; null when missing or unreadable. */
	public static byte[] getByteArrayResource(Class clazz, String _sResourceFileName) {
		byte[] btReply = null;
		try (InputStream stream = clazz.getResourceAsStream(_sResourceFileName)) {
			if (stream != null)
				btReply = IOUtils.toByteArray(stream);
		}
		catch (IOException e) {
			LOG.error("Failed to load resource: " + _sResourceFileName, e);
		}
		return btReply;
	}

	public static String loadFileAsString(String _fileName) {
		return loadFileAsString(new File(_fileName));
	}

	/** Reads a file and decodes it as UTF-8; null when the file is missing. */
	public static String loadFileAsString(File _file) {
		return NullUtils.toString(loadFile(_file));
	}

	public static List<String> loadFileLines(String _fileName) {
		return loadFileLines(new File(_fileName));
	}

	/**
	 * Reads a file line by line.
	 * Returns null when the file is null or missing, and an empty list on a read error
	 * (NOTE(review): this null-vs-empty asymmetry is preserved for existing callers).
	 */
	public static List<String> loadFileLines(File _file) {
		if ((_file == null) || !_file.exists())
			return null;
		try (BufferedReader reader = new BufferedReader(new FileReader(_file))) {
			List<String> lines = new ArrayList<>();
			String line;
			while ((line = reader.readLine()) != null)
			{
				lines.add(line);
			}
			return lines;
		}
		catch (Throwable t) {
			LOG.error("Failed to load file: " + _file.getAbsolutePath(), t);
			return Collections.emptyList();
		}
	}

	public static byte[] loadFile(String _fileName) {
		return loadFile(new File(_fileName));
	}

	/** Reads an entire file into memory; null when missing or on error. */
	public static byte[] loadFile(File _file) {
		if ((_file == null) || !_file.exists())
			return null;
		try (InputStream is = new FileInputStream(_file)) {
			return IOUtils.toByteArray(is);
		}
		catch (Throwable t) {
			LOG.error("Failed to load file: " + _file.getAbsolutePath(), t);
			return null;
		}
	}

	/** Writes a string to a file as UTF-8, replacing any existing content. */
	public static void writeFile(String _sFile, String _data) {
		writeFile(_sFile, NullUtils.toByteArray(_data));
	}

	/** Writes bytes to a file, replacing any existing content. */
	public static void writeFile(String _sFile, byte[] _btData) {
		try (FileOutputStream os = new FileOutputStream(_sFile, false)) {
			os.write(_btData);
		}
		catch (Throwable t) {
			LOG.error("Failed to write file: " + _sFile, t);
		}
	}

	/** Writes each line followed by a single LF, replacing any existing content. */
	public static void writeFileLines(String _sFile, List<String> _lines) {
		try (FileOutputStream os = new FileOutputStream(_sFile, false)) {
			for (String line : CollectionUtils.makeNotNull(_lines)) {
				os.write(NullUtils.toByteArray(line));
				os.write((char)10);
			}
		}
		catch (Throwable t) {
			LOG.error("Failed to write file: " + _sFile, t);
		}
	}
}

View File

@@ -0,0 +1,51 @@
package com.lanternsoftware.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** GZIP compression helpers; failures are logged and reported as null returns. */
public abstract class ZipUtils {
	private static final Logger LOG = LoggerFactory.getLogger(ZipUtils.class);

	/** GZIP-compresses the bytes; null for null input or on failure. */
	public static byte[] zip(byte[] _btData) {
		if (_btData == null)
			return null;
		ByteArrayOutputStream out = new ByteArrayOutputStream();
		try (GZIPOutputStream stream = new GZIPOutputStream(out)) {
			stream.write(_btData);
		}
		catch (IOException e) {
			LOG.error("Failed to zip data", e);
			return null;
		}
		// The GZIP stream must be fully closed (flushing the trailer) before the buffer is read,
		// which try-with-resources guarantees before we reach this point.
		return out.toByteArray();
	}

	/** Decompresses GZIP bytes; null for null/empty input or on failure. */
	public static byte[] unzip(byte[] _btData) {
		if ((_btData == null) || (_btData.length == 0))
			return null;
		try (GZIPInputStream stream = new GZIPInputStream(new ByteArrayInputStream(_btData))) {
			return IOUtils.toByteArray(stream);
		}
		catch (IOException e) {
			LOG.error("Failed to unzip data", e);
			return null;
		}
	}
}

View File

@@ -0,0 +1,132 @@
package com.lanternsoftware.util.concurrency;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Future;
/**
 * Static helpers for sleeping, monitor wait/notify, shared string mutexes, and
 * waiting on futures.
 */
public abstract class ConcurrencyUtils
{
	private static final Object m_mutex = new Object();
	// Canonical mutex instances keyed by name; guarded by m_mutex
	private static Map<String, String> mapMutexes = null;

	/**
	 * Sleeps the current thread for the given number of milliseconds.
	 * @param _lDuration sleep duration in milliseconds
	 */
	public static void sleep(long _lDuration)
	{
		try
		{
			Thread.sleep(_lDuration);
		}
		catch (InterruptedException e)
		{
			// Fix: preserve the interrupt status instead of discarding it
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
	}

	/**
	 * Blocks until another thread calls notify/notifyAll on the given monitor.
	 * @param _object the monitor to wait on
	 */
	public static void wait(Object _object)
	{
		try
		{
			synchronized(_object)
			{
				_object.wait();
			}
		}
		catch (InterruptedException e)
		{
			// Fix: preserve the interrupt status instead of discarding it
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
	}

	/**
	 * Blocks until notified or the timeout elapses.
	 * @param _object the monitor to wait on
	 * @param _iTimeout maximum wait in milliseconds (0 waits forever)
	 */
	public static void wait(Object _object, int _iTimeout)
	{
		try
		{
			synchronized(_object)
			{
				_object.wait(_iTimeout);
			}
		}
		catch (InterruptedException e)
		{
			// Fix: preserve the interrupt status instead of discarding it
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
	}

	/**
	 * Wakes a single thread waiting on the given monitor.
	 * @param _object the monitor to notify
	 */
	public static void notify(Object _object)
	{
		// The former IllegalMonitorStateException catch was unreachable: notify()
		// is always called while holding the monitor here
		synchronized(_object)
		{
			_object.notify();
		}
	}

	/**
	 * Wakes all threads waiting on the given monitor.
	 * @param _object the monitor to notify
	 */
	public static void notifyAll(Object _object)
	{
		// The former IllegalMonitorStateException catch was unreachable: notifyAll()
		// is always called while holding the monitor here
		synchronized(_object)
		{
			_object.notifyAll();
		}
	}

	/**
	 * Returns a canonical String instance for the given key, so that all callers
	 * using the same key can synchronize on the same object.
	 * @param _sKey the mutex key
	 * @return the shared mutex object for the key
	 */
	public static String getMutex(String _sKey)
	{
		synchronized (m_mutex)
		{
			if (mapMutexes == null)
				mapMutexes = new HashMap<String, String>();
			String sMutex = mapMutexes.get(_sKey);
			if (sMutex != null)
				return sMutex;
			mapMutexes.put(_sKey, _sKey);
			return _sKey;
		}
	}

	/** Clears the mutex cache. */
	public static void destroy()
	{
		// Fix: guard with the same lock getMutex uses; the unsynchronized
		// check-clear-null sequence could race with a concurrent getMutex call
		synchronized (m_mutex)
		{
			if (mapMutexes != null)
			{
				mapMutexes.clear();
				mapMutexes = null;
			}
		}
	}

	/**
	 * Waits for all of the given futures to complete, discarding results. Null-safe.
	 * @param _futures the futures to await
	 */
	public static void getAll(Future<?>... _futures)
	{
		if (_futures == null)
			return;
		getAll(Arrays.asList(_futures));
	}

	/**
	 * Waits for all of the given futures to complete, discarding results and
	 * printing (not propagating) failures. Null-safe.
	 * @param _collFutures the futures to await
	 */
	public static void getAll(Collection<Future<?>> _collFutures)
	{
		if (_collFutures == null)
			return;
		for (Future<?> f : _collFutures)
		{
			try
			{
				f.get();
			}
			catch (Exception e)
			{
				e.printStackTrace();
			}
		}
	}
}

View File

@@ -0,0 +1,16 @@
package com.lanternsoftware.util.concurrency;
import java.util.concurrent.Callable;
/**
 * A unit of work whose failure is captured rather than propagated.
 */
public abstract class Execution implements Callable<ExecutionResult> {
	/** Performs the work; any thrown exception is captured by {@link #call()}. */
	public abstract void run() throws Exception;

	/**
	 * Invokes {@link #run()} and wraps the outcome.
	 * @return a result holding the thrown exception, or holding null on success
	 */
	public final ExecutionResult call() {
		Exception failure = null;
		try {
			run();
		}
		catch (Exception _e) {
			failure = _e;
		}
		return new ExecutionResult(failure);
	}
}

View File

@@ -0,0 +1,13 @@
package com.lanternsoftware.util.concurrency;
/**
 * The outcome of an {@code Execution}: holds the exception it threw, or null on success.
 */
public class ExecutionResult {
	// The captured failure; null indicates the execution completed normally
	private final Exception exception;

	public ExecutionResult(Exception _e) {
		exception = _e;
	}

	/** @return the exception raised during execution, or null if it completed normally */
	public Exception getException() {
		return exception;
	}
}

View File

@@ -0,0 +1,88 @@
package com.lanternsoftware.util.concurrency;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Future;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.CollectionUtils;
/**
 * Helpers for waiting on futures and collecting their results.
 */
public abstract class ExecutionUtil {
	private static final Logger LOG = LoggerFactory.getLogger(ExecutionUtil.class);

	/**
	 * Waits for every future to complete. If any execution captured an exception,
	 * the last one observed is rethrown after all futures have been awaited.
	 * @param futures the futures to await
	 * @throws Exception the last captured execution failure, or any failure thrown by Future.get()
	 */
	public static void waitForExecution(Collection<Future<ExecutionResult>> futures) throws Exception {
		Exception e = null;
		for (Future<ExecutionResult> f : futures) {
			ExecutionResult result = f.get();
			if (result.getException() != null)
				e = result.getException();
		}
		if (e != null)
			throw e;
	}

	/**
	 * Waits for every future to complete, logging failures instead of propagating them.
	 * @param _logger the logger that receives failures
	 * @param futures the futures to await
	 */
	public static void waitForExecution(Logger _logger, Collection<Future<?>> futures) {
		for (Future<?> f : futures) {
			try {
				f.get();
			}
			catch (Exception _e) {
				_logger.error("Exception occurred during execution", _e);
			}
		}
	}

	/**
	 * Waits for every future to complete. If any execution captured an exception,
	 * the last one observed is rethrown after all futures have been awaited.
	 * @param futures the futures to await
	 * @throws Exception the last captured execution failure, or any failure thrown by Future.get()
	 */
	@SafeVarargs // Fix: suppress callers' heap-pollution warnings; the varargs array is only read
	public static void waitForExecution(Future<ExecutionResult>... futures) throws Exception {
		Exception e = null;
		for (Future<ExecutionResult> f : futures) {
			ExecutionResult result = f.get();
			if (result.getException() != null)
				e = result.getException();
		}
		if (e != null)
			throw e;
	}

	/**
	 * Waits for every future to complete, logging failures instead of propagating them.
	 * @param _logger the logger that receives failures
	 * @param futures the futures to await
	 */
	public static void waitForExecution(Logger _logger, Future<?>... futures) {
		for (Future<?> f : futures) {
			try {
				f.get();
			}
			catch (Exception _e) {
				_logger.error("Exception occurred during execution", _e);
			}
		}
	}

	/**
	 * Gets the result of a single future.
	 * @param _futures the future to get (note: a single future despite the plural name)
	 * @return the result, or null if it failed or produced null
	 */
	public static <T> T get(Future<T> _futures) {
		return CollectionUtils.getFirst(getAll(Collections.singletonList(_futures)));
	}

	/**
	 * Collects the non-null results of the given futures, logging failures.
	 * @param _futures the futures to collect
	 * @return the results, never null
	 */
	@SafeVarargs // Fix: suppress callers' heap-pollution warnings; the varargs array is only read
	public static <T> List<T> getAll(Future<T>... _futures) {
		return getAll(Arrays.asList(_futures));
	}

	/**
	 * Collects the non-null results of the given futures, logging failures.
	 * @param _futures the futures to collect; null is treated as empty
	 * @return the results, never null
	 */
	public static <T> List<T> getAll(Collection<Future<T>> _futures) {
		return getAll(_futures, false);
	}

	/**
	 * Collects the results of the given futures, logging failures.
	 * @param _futures the futures to collect; null is treated as empty
	 * @param _includeNulls true to keep null results in the returned list
	 * @return the results, never null
	 */
	public static <T> List<T> getAll(Collection<Future<T>> _futures, boolean _includeNulls) {
		List<T> ret = new ArrayList<>();
		for (Future<T> future : CollectionUtils.makeNotNull(_futures)) {
			try {
				T t = future.get();
				if (_includeNulls || (t != null))
					ret.add(t);
			}
			catch (Exception e) {
				LOG.error("Exception while getting future", e);
			}
		}
		return ret;
	}
}

View File

@@ -0,0 +1,13 @@
package com.lanternsoftware.util.concurrency;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
 * Factory methods for thread pool executors.
 */
public abstract class ExecutorUtil {
	/**
	 * Creates a fixed-size thread pool whose threads (including core threads)
	 * are reclaimed after 60 seconds of idleness.
	 * @param _threadCount the number of threads in the pool
	 * @return the configured executor
	 */
	public static ThreadPoolExecutor fixedThreadPool(int _threadCount) {
		LinkedBlockingQueue<Runnable> workQueue = new LinkedBlockingQueue<Runnable>();
		ThreadPoolExecutor pool = new ThreadPoolExecutor(_threadCount, _threadCount, 60L, TimeUnit.SECONDS, workQueue);
		pool.allowCoreThreadTimeOut(true);
		return pool;
	}
}

View File

@@ -0,0 +1,284 @@
package com.lanternsoftware.util.cryptography;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.LongBuffer;
import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.spec.KeySpec;
import java.util.Arrays;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.codec.binary.Base64;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.CollectionUtils;
/**
 * Symmetric encryption helper using AES/CBC/PKCS5Padding with a fixed secret key.
 * <p>
 * When constructed without an IV, each encryption generates a fresh random IV and a
 * random 16-byte salt, so identical plaintexts encrypt differently each time. The
 * output layout in that mode is: [16-byte clear IV][AES(16-byte salt + plaintext)].
 * When constructed with a fixed IV, encryption is deterministic and the output is
 * the raw ciphertext with no prefix.
 */
public class AESTool {
	private static final Logger LOG = LoggerFactory.getLogger(AESTool.class);
	// The AES secret key used for every encrypt/decrypt operation
	private final SecretKey key;
	// Shared RNG for IV and salt generation; null only if SHA1PRNG is unavailable
	private final static SecureRandom rng = rng();
	// Optional fixed IV; null means a random IV is generated per encryption (recommended)
	private final byte[] iv;
	// Builds the shared SecureRandom instance used for IVs and salts.
	private static SecureRandom rng() {
		try {
			SecureRandom rng = SecureRandom.getInstance("SHA1PRNG");
			rng.generateSeed(16);
			return rng;
		} catch (NoSuchAlgorithmException e) {
			// NOTE(review): returning null here leads to an NPE later in randomIV()/encrypt();
			// in practice SHA1PRNG is available on standard JVMs
			LOG.error("Failed to initialize SecureRandom with SHA1PRNG", e);
			return null;
		}
	}
	/**
	 * @return a random 16-byte initialization vector
	 */
	public static byte[] randomIV() {
		// NOTE(review): generateSeed() is intended for seeding other PRNGs and may block;
		// nextBytes() is the conventional call for IV material — confirm before changing
		return rng.generateSeed(16);
	}
	/**
	 * @return a randomly generated AES secret key
	 */
	public static SecretKey generateRandomSecretKey() {
		try {
			// Derives a 256-bit AES key via PBKDF2 from random base64 "password" and salt material
			SecretKeyFactory factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
			KeySpec spec = new PBEKeySpec(Base64.encodeBase64String(new SecureRandom().generateSeed(32)).toCharArray(), new SecureRandom().generateSeed(32), 65536, 256);
			SecretKey key = factory.generateSecret(spec);
			return new SecretKeySpec(key.getEncoded(), "AES");
		} catch (Exception e) {
			LOG.error("Failed to generate a random AES secret key", e);
			return null;
		}
	}
	/**
	 * Generates a random key and prints a ready-to-paste {@code new AESTool(...)}
	 * constructor invocation holding that key, for embedding in source code.
	 */
	public static void printRandomSecretKey() {
		SecretKey key = generateRandomSecretKey();
		// NOTE(review): if key generation failed (key == null), or the encoded key is
		// empty (builder stays null), this method throws NPE — confirm acceptable for a dev-only helper
		byte[] btKey = key.getEncoded();
		StringBuilder builder = null;
		for (long lValue : toLongs(btKey)) {
			if (builder == null)
				builder = new StringBuilder("new AESTool(");
			else
				builder.append(",");
			builder.append(lValue);
			builder.append("L");
		}
		builder.append(");");
		System.out.println(builder.toString());
	}
	/**
	 * @param _btKey the encoded form of a {@link SecretKey} object. See the {@link SecretKey#getEncoded()} method.
	 */
	public AESTool(byte[] _btKey) {
		this(new SecretKeySpec(_btKey, "AES"));
	}
	/**
	 * @param _btKey the encoded form of a {@link SecretKey} object. See the {@link SecretKey#getEncoded()} method.
	 * @param _iv the initialization vector to use. If this is set, every call of encrypt for a given input will produce the same output. If null is passed, every call of encrypt for a given input will generate a random IV and the output will be different each time (recommended).
	 */
	public AESTool(byte[] _btKey, byte[] _iv) {
		this(new SecretKeySpec(_btKey, "AES"), _iv);
	}
	/**
	 * @param _arrKey the encoded form of a {@link SecretKey} object converted to an array of long values using the {@link AESTool#toLongs(byte[])} method. See the {@link SecretKey#getEncoded()} method.
	 */
	public AESTool(long... _arrKey) {
		this(new SecretKeySpec(toByteArray(_arrKey), "AES"));
	}
	/**
	 * @param _arrKey the encoded form of a {@link SecretKey} object converted to an array of long values using the {@link AESTool#toLongs(byte[])} method. See the {@link SecretKey#getEncoded()} method.
	 * @param _iv the initialization vector to use. If this is set, every call of encrypt for a given input will produce the same output. If null is passed, every call of encrypt for a given input will generate a random IV and the output will be different each time (recommended).
	 */
	public AESTool(byte[] _iv, long... _arrKey) {
		this(new SecretKeySpec(toByteArray(_arrKey), "AES"), _iv);
	}
	/** Creates a tool that uses a random IV per encryption (recommended). */
	public AESTool(SecretKey _key) {
		this(_key, null);
	}
	/**
	 * Creates a tool with an optional fixed IV.
	 * @param _key the AES secret key
	 * @param _iv the fixed 16-byte IV, or null for a random IV per encryption
	 * @throws RuntimeException if _iv is non-null and not exactly 16 bytes
	 */
	public AESTool(SecretKey _key, byte[] _iv) {
		key = _key;
		if ((_iv != null) && (_iv.length != 16))
			throw new RuntimeException("Initialization Vector must be null or exactly 16 bytes in length");
		iv = _iv;
	}
	/**
	 * @param _data a string to be encrypted with this tool's secret key
	 * @return the encrypted data as a base64 encoded string
	 */
	public String encryptToBase64(String _data) {
		return encryptToBase64(toByteArray(_data));
	}
	/**
	 * @param _btData the binary data to be encrypted with this tool's secret key
	 * @return the encrypted data as a base64 encoded string
	 */
	public String encryptToBase64(byte[] _btData) {
		if (_btData == null)
			return null;
		return Base64.encodeBase64String(encrypt(_btData));
	}
	/**
	 * @param _data a string to be encrypted with this tool's secret key
	 * @return the encrypted data as a url safe base64 encoded string
	 */
	public String encryptToUrlSafeBase64(String _data) {
		return encryptToUrlSafeBase64(toByteArray(_data));
	}
	/**
	 * @param _btData the binary data to be encrypted with this tool's secret key
	 * @return the encrypted data as a url safe base64 encoded string
	 */
	public String encryptToUrlSafeBase64(byte[] _btData) {
		if (_btData == null)
			return null;
		return Base64.encodeBase64URLSafeString(encrypt(_btData));
	}
	/**
	 * @param _data a string to be encrypted with this tool's secret key
	 * @return the encrypted data in binary form
	 */
	public byte[] encrypt(String _data) {
		return encrypt(toByteArray(_data));
	}
	/**
	 * @param _btData the binary data to be encrypted with this tool's secret key
	 * @return the encrypted data in binary form, or null if _btData is null or encryption fails
	 */
	public byte[] encrypt(byte[] _btData) {
		if (_btData == null)
			return null;
		try {
			Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
			byte[] btIV = (iv != null) ? iv : randomIV();
			cipher.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(btIV));
			if (iv != null)
				return cipher.doFinal(_btData);
			else {
				// Random-IV mode: prepend a random 16-byte salt to the plaintext, then
				// prepend the clear-text IV to the ciphertext so decrypt() can recover it
				byte[] btSalt = rng.generateSeed(16);
				return CollectionUtils.merge(btIV, cipher.doFinal(CollectionUtils.merge(btSalt, _btData)));
			}
		} catch (Exception e) {
			LOG.error("Failed to encrypt data", e);
			return null;
		}
	}
	/**
	 * @param _base64 the base64 encoded representation of the aes encrypted byte array to be decrypted with this tool's
	 * secret key
	 * @return the decrypted byte array transformed to a string.
	 */
	public String decryptFromBase64ToString(String _base64) {
		return toString(decryptFromBase64(_base64));
	}
	/**
	 * @param _base64 the base64 encoded representation of the aes encrypted byte array to be decrypted with this tool's
	 * secret key
	 * @return the decrypted byte array
	 */
	public byte[] decryptFromBase64(String _base64) {
		return _base64 == null ? null : decrypt(Base64.decodeBase64(_base64));
	}
	/**
	 * @param _btData the encrypted byte array to be decrypted with this tool's secret key
	 * @return the decrypted byte array transformed to a string
	 */
	public String decryptToString(byte[] _btData) {
		return toString(decrypt(_btData));
	}
	/**
	 * @param _btData the encrypted byte array to be decrypted with this tool's secret key
	 * @return the decrypted byte array, or null if _btData is null or decryption fails
	 */
	public byte[] decrypt(byte[] _btData) {
		if (_btData == null)
			return null;
		try {
			Cipher decipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
			if (iv == null) {
				// Random-IV mode: the first 16 bytes of the input are the clear IV, and the
				// first 16 bytes of the decrypted payload are the salt added by encrypt();
				// both are stripped to recover the original plaintext
				decipher.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(Arrays.copyOfRange(_btData, 0, 16)));
				byte[] btData = decipher.doFinal(Arrays.copyOfRange(_btData, 16, _btData.length));
				return Arrays.copyOfRange(btData, 16, btData.length);
			} else {
				// Fixed-IV mode: the input is the raw ciphertext
				decipher.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(iv));
				return decipher.doFinal(_btData);
			}
		} catch (Exception e) {
			LOG.error("Failed to decrypt data", e);
			return null;
		}
	}
	/**
	 * @param _btData a byte array to convert to an array of longs (length should be a multiple of 8; trailing bytes are dropped)
	 * @return the array of long values that contains the data from the byte array.
	 */
	public static long[] toLongs(byte[] _btData) {
		if (_btData == null)
			return null;
		long[] lData = new long[_btData.length / 8];
		LongBuffer data = ByteBuffer.wrap(_btData).order(ByteOrder.BIG_ENDIAN).asLongBuffer();
		data.get(lData);
		return lData;
	}
	/**
	 * @param _arrLongs an array of longs to convert into a byte array representing the same data (big-endian)
	 * @return the converted byte array
	 */
	public static byte[] toByteArray(long... _arrLongs) {
		ByteBuffer input = ByteBuffer.allocate(_arrLongs.length * 8).order(ByteOrder.BIG_ENDIAN);
		for (long lInput : _arrLongs) {
			input.putLong(lInput);
		}
		return input.array();
	}
	/**
	 * Decodes a UTF-8 byte array into a String (no checked exception; uses StandardCharsets).
	 *
	 * @param _btString the UTF-8 encoded representation of a string
	 * @return the String object created from the byte array, or null if _btString is null
	 */
	public static String toString(byte[] _btString) {
		if (_btString == null)
			return null;
		return new String(_btString, StandardCharsets.UTF_8);
	}
	/**
	 * Encodes a String as UTF-8 bytes (no checked exception; uses StandardCharsets).
	 *
	 * @param _value the string to turn into a byte array
	 * @return the UTF-8 encoded byte array representation of the string, or null if _value is null
	 */
	public static byte[] toByteArray(String _value) {
		if (_value == null)
			return null;
		return _value.getBytes(StandardCharsets.UTF_8);
	}
}

View File

@@ -0,0 +1,220 @@
package com.lanternsoftware.util.cryptography;
import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.math.BigInteger;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.cert.Certificate;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateFactory;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.RSAPrivateKeySpec;
import java.security.spec.RSAPublicKeySpec;
import java.security.spec.X509EncodedKeySpec;
import com.lanternsoftware.util.NullUtils;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.CollectionUtils;
/**
 * Utilities for generating, serializing, and loading RSA keys and X.509 certificates.
 */
public abstract class RSAUtils {
	private static final Logger LOG = LoggerFactory.getLogger(RSAUtils.class);

	/**
	 * @return a new 2048-bit RSA key pair, or null if the RSA algorithm is unavailable
	 */
	public static KeyPair generateRandomRSAKeyPair() {
		try {
			KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA");
			keyPairGenerator.initialize(2048);
			return keyPairGenerator.genKeyPair();
		}
		catch (NoSuchAlgorithmException _e) {
			LOG.error("Failed to generate RSA key pair", _e);
			return null;
		}
	}

	/**
	 * Serializes a private key as "base64(modulus),base64(privateExponent)".
	 * @param _key the key to serialize
	 * @return the serialized key, or null if _key is null
	 */
	public static String toString(RSAPrivateKey _key) {
		if (_key == null)
			return null;
		StringBuilder b = new StringBuilder(Base64.encodeBase64String(_key.getModulus().toByteArray()));
		b.append(",");
		b.append(Base64.encodeBase64String(_key.getPrivateExponent().toByteArray()));
		return b.toString();
	}

	/**
	 * Serializes a public key as "base64(modulus),base64(publicExponent)".
	 * @param _key the key to serialize
	 * @return the serialized key, or null if _key is null
	 */
	public static String toString(RSAPublicKey _key) {
		if (_key == null)
			return null;
		StringBuilder b = new StringBuilder(Base64.encodeBase64String(_key.getModulus().toByteArray()));
		b.append(",");
		b.append(Base64.encodeBase64String(_key.getPublicExponent().toByteArray()));
		return b.toString();
	}

	/**
	 * Renders a public key in PEM format (base64 of the X.509/SPKI encoding, wrapped at 64 columns).
	 * @param _key the key to render
	 * @return the PEM text
	 */
	public static String toPEM(RSAPublicKey _key) {
		StringBuilder pem = new StringBuilder("-----BEGIN PUBLIC KEY-----\r\n");
		pem.append(NullUtils.wrap(Base64.encodeBase64String(_key.getEncoded()), 64, true));
		pem.append("\r\n-----END PUBLIC KEY-----");
		return pem.toString();
	}

	/**
	 * Renders a certificate in PEM format.
	 * @param _cert the certificate to render
	 * @return the PEM text, or null if the certificate cannot be encoded
	 */
	public static String toPEM(Certificate _cert) {
		try {
			StringBuilder pem = new StringBuilder("-----BEGIN CERTIFICATE-----\r\n");
			pem.append(NullUtils.wrap(Base64.encodeBase64String(_cert.getEncoded()), 64, true));
			pem.append("\r\n-----END CERTIFICATE-----");
			return pem.toString();
		} catch (CertificateEncodingException _e) {
			LOG.error("Failed to generate certificate PEM", _e);
			return null;
		}
	}

	/**
	 * Renders a private key in PEM format.
	 * NOTE(review): getEncoded() produces PKCS#8 bytes, but the "RSA PRIVATE KEY"
	 * header is the PKCS#1 label; some consumers may reject this mismatch — confirm
	 * before changing the label, since existing consumers may depend on it.
	 * @param _key the key to render
	 * @return the PEM text
	 */
	public static String toPEM(RSAPrivateKey _key) {
		StringBuilder pem = new StringBuilder("-----BEGIN RSA PRIVATE KEY-----\r\n");
		pem.append(NullUtils.wrap(Base64.encodeBase64String(_key.getEncoded()), 64, true));
		pem.append("\r\n-----END RSA PRIVATE KEY-----");
		return pem.toString();
	}

	/**
	 * Parses a private key from the "base64(modulus),base64(privateExponent)" form
	 * produced by {@link #toString(RSAPrivateKey)}.
	 * @param _privateKey64 the serialized key
	 * @return the key, or null if the input is malformed
	 */
	public static RSAPrivateKey toPrivateKey(String _privateKey64) {
		try {
			String[] parts = NullUtils.makeNotNull(_privateKey64).split(",");
			if (CollectionUtils.size(parts) == 2) {
				KeyFactory fact = KeyFactory.getInstance("RSA");
				RSAPrivateKeySpec keySpec = new RSAPrivateKeySpec(new BigInteger(Base64.decodeBase64(parts[0])), new BigInteger(Base64.decodeBase64(parts[1])));
				return (RSAPrivateKey) fact.generatePrivate(keySpec);
			}
		}
		catch (Exception _e) {
			LOG.error("Failed to generate RSA private key", _e);
		}
		return null;
	}

	/**
	 * Parses a public key from the "base64(modulus),base64(publicExponent)" form
	 * produced by {@link #toString(RSAPublicKey)}.
	 * @param _publicKey64 the serialized key
	 * @return the key, or null if the input is malformed
	 */
	public static RSAPublicKey toPublicKey(String _publicKey64) {
		try {
			String[] parts = NullUtils.makeNotNull(_publicKey64).split(",");
			if (CollectionUtils.size(parts) == 2) {
				KeyFactory fact = KeyFactory.getInstance("RSA");
				RSAPublicKeySpec keySpec = new RSAPublicKeySpec(new BigInteger(Base64.decodeBase64(parts[0])), new BigInteger(Base64.decodeBase64(parts[1])));
				return (RSAPublicKey) fact.generatePublic(keySpec);
			}
		}
		catch (Exception _e) {
			LOG.error("Failed to generate RSA public key", _e);
		}
		return null;
	}

	/**
	 * Parses a public key from its PEM representation.
	 * @param _pem the PEM text
	 * @return the key, or null if _pem is null or cannot be parsed
	 */
	public static RSAPublicKey fromPEMtoPublicKey(String _pem) {
		if (_pem == null)
			return null;
		String pem = _pem.replaceAll("(-+BEGIN PUBLIC KEY-+|-+END PUBLIC KEY-+|\\r|\\n)", "");
		X509EncodedKeySpec spec = new X509EncodedKeySpec(Base64.decodeBase64(pem));
		try {
			KeyFactory fact = KeyFactory.getInstance("RSA");
			return (RSAPublicKey) fact.generatePublic(spec);
		}
		catch (Exception _e) {
			LOG.error("Failed to generate RSA public key", _e);
			return null;
		}
	}

	/**
	 * Parses an X.509 certificate from its PEM representation.
	 * @param _pem the PEM text
	 * @return the certificate, or null if _pem is null or cannot be parsed
	 */
	public static Certificate fromPEMtoCertificate(String _pem) {
		// Fix: null guard for consistency with fromPEMtoPublicKey (previously threw NPE)
		if (_pem == null)
			return null;
		String pem = _pem.replaceAll("(-+BEGIN CERTIFICATE-+|-+END CERTIFICATE-+|\\r|\\n)", "");
		ByteArrayInputStream is = null;
		try {
			is = new ByteArrayInputStream(Base64.decodeBase64(pem));
			CertificateFactory cf = CertificateFactory.getInstance("X.509");
			return cf.generateCertificate(is);
		}
		catch (Exception _e) {
			LOG.error("Failed to generate RSA certificate", _e);
			return null;
		}
		finally {
			IOUtils.closeQuietly(is);
		}
	}

	/** Loads a certificate from a keystore file. */
	public static Certificate loadCert(String _keystoreFileName, String _keystorePassword, String _certAlias) {
		return loadCert(loadKeystore(_keystoreFileName, _keystorePassword), _certAlias);
	}

	/** Loads a certificate from a keystore stream (the stream is closed). */
	public static Certificate loadCert(InputStream _is, String _keystorePassword, String _certAlias) {
		return loadCert(loadKeystore(_is, _keystorePassword), _certAlias);
	}

	/**
	 * Retrieves a certificate from an already-loaded keystore.
	 * @return the certificate, or null on failure
	 */
	public static Certificate loadCert(KeyStore _keystore, String _certAlias) {
		try {
			return _keystore.getCertificate(_certAlias);
		}
		catch (Exception e) {
			LOG.error("Failed to load certificate {}", e.getMessage(), e);
			return null;
		}
	}

	/** Loads a private key from a keystore file where the key password equals the store password. */
	public static PrivateKey loadPrivateKey(String _keystoreFileName, String _password, String _certAlias) {
		return loadPrivateKey(_keystoreFileName, _password, _password, _certAlias);
	}

	/** Loads a private key from a keystore stream where the key password equals the store password. */
	public static PrivateKey loadPrivateKey(InputStream _is, String _password, String _certAlias) {
		return loadPrivateKey(_is, _password, _password, _certAlias);
	}

	/** Loads a private key from a keystore file with distinct store and key passwords. */
	public static PrivateKey loadPrivateKey(String _keystoreFileName, String _keystorePassword, String _certPassword, String _certAlias) {
		return getPrivateKey(loadKeystore(_keystoreFileName, _keystorePassword), _certPassword, _certAlias);
	}

	/** Loads a private key from a keystore stream with distinct store and key passwords. */
	public static PrivateKey loadPrivateKey(InputStream _is, String _keystorePassword, String _certPassword, String _certAlias) {
		return getPrivateKey(loadKeystore(_is, _keystorePassword), _certPassword, _certAlias);
	}

	/**
	 * Retrieves a private key from an already-loaded keystore.
	 * @return the key, or null on failure
	 */
	public static PrivateKey getPrivateKey(KeyStore _keystore, String _certPassword, String _certAlias) {
		try {
			return (PrivateKey) _keystore.getKey(_certAlias, _certPassword.toCharArray());
		}
		catch (Exception e) {
			LOG.error("Failed to load key: {}", e.getMessage(), e);
			return null;
		}
	}

	/**
	 * Loads a keystore (default type) from a file.
	 * @return the keystore, or null on failure
	 */
	public static KeyStore loadKeystore(String _keystoreFileName, String _keystorePassword) {
		try {
			// The stream is closed by loadKeystore(InputStream, String)
			return loadKeystore(new FileInputStream(_keystoreFileName), _keystorePassword);
		}
		catch (Exception e) {
			LOG.error("Failed to load keystore: {}", e.getMessage(), e);
			return null;
		}
	}

	/**
	 * Loads a keystore (default type) from a stream; the stream is always closed.
	 * @return the keystore, or null on failure
	 */
	public static KeyStore loadKeystore(InputStream _is, String _keystorePassword) {
		try {
			KeyStore keystore = KeyStore.getInstance(KeyStore.getDefaultType());
			keystore.load(_is, _keystorePassword.toCharArray());
			return keystore;
		}
		catch (Exception e) {
			LOG.error("Failed to load keystore: {}", e.getMessage(), e);
			return null;
		}
		finally {
			IOUtils.closeQuietly(_is);
		}
	}
}

View File

@@ -0,0 +1,102 @@
package com.lanternsoftware.util.csv;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.NullUtils;
import java.util.ArrayList;
import java.util.List;
/**
 * An immutable grid of parsed CSV data with an optional header row.
 * Every row is padded with empty cells so it has exactly {@link #columns} entries.
 */
public class CSV {
	public final int columns;
	public final int rows;
	private final List<String> header;
	private final List<CSVCell[]> data;

	/** Creates an empty CSV with no header, rows, or columns. */
	public CSV() {
		this(null, null, 0);
	}

	/**
	 * Creates a CSV from rows of raw string values.
	 * @param _header the column names, may be null
	 * @param _data the row data; null produces an empty CSV
	 * @param _columnCount the grid width; shorter rows are padded with empty cells
	 */
	public CSV(List<String> _header, List<List<String>> _data, int _columnCount) {
		header = _header;
		if (_data == null) {
			columns = 0;
			rows = 0;
			data = new ArrayList<>(0);
			return;
		}
		rows = _data.size();
		columns = _columnCount;
		data = new ArrayList<>(rows);
		for (List<String> sourceRow : _data) {
			CSVCell[] row = new CSVCell[columns];
			int col = 0;
			for (String value : CollectionUtils.makeNotNull(sourceRow))
				row[col++] = new CSVCell(value, value);
			while (col < columns)
				row[col++] = new CSVCell("", "");
			data.add(row);
		}
	}

	/**
	 * Creates a CSV from rows of pre-built cells. The grid width is the widest of
	 * the header and any row; shorter rows are padded with empty cells.
	 * @param _header the column names, may be null
	 * @param _data the row data; null produces an empty CSV
	 */
	public CSV(List<String> _header, List<List<CSVCell>> _data) {
		header = _header;
		if (_data == null) {
			columns = 0;
			rows = 0;
			data = new ArrayList<>(0);
			return;
		}
		rows = _data.size();
		data = new ArrayList<>(rows);
		int maxColumns = CollectionUtils.size(header);
		for (List<CSVCell> sourceRow : _data)
			maxColumns = Math.max(maxColumns, CollectionUtils.size(sourceRow));
		columns = maxColumns;
		for (List<CSVCell> sourceRow : _data) {
			CSVCell[] row = new CSVCell[columns];
			int col = 0;
			for (CSVCell cell : CollectionUtils.makeNotNull(sourceRow))
				row[col++] = (cell == null) ? new CSVCell("", "") : cell;
			while (col < columns)
				row[col++] = new CSVCell("", "");
			data.add(row);
		}
	}

	/**
	 * Returns the display value of the cell at the given coordinates.
	 * @param _row the zero-based row index
	 * @param _column the zero-based column index
	 * @return the display value, or "" for out-of-bounds coordinates or empty cells
	 */
	public String cell(int _row, int _column) {
		if ((_row < 0) || (_row >= rows) || (_column < 0) || (_column >= columns))
			return "";
		CSVCell cell = data.get(_row)[_column];
		return ((cell == null) || (cell.display == null)) ? "" : cell.display;
	}

	/** @return the header row, may be null */
	public List<String> getHeaders() {
		return header;
	}

	/** @return the header name for the given column, never null */
	public String getHeader(int _column) {
		return NullUtils.makeNotNull(CollectionUtils.get(header, _column));
	}

	/** @return the number of data rows */
	public int getRows() {
		return rows;
	}

	/** @return the number of columns */
	public int getColumns() {
		return columns;
	}
}

View File

@@ -0,0 +1,59 @@
package com.lanternsoftware.util.csv;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import com.lanternsoftware.util.NullUtils;
/**
 * A single CSV cell pairing a display string with an optional typed value used for sorting.
 */
public class CSVCell implements Comparable<CSVCell> {
	public final String display;
	public final Object comparable;
	public final boolean reverseSort;

	/** Creates a cell whose sort value is its display string. */
	public CSVCell(String _display) {
		this(_display, _display);
	}

	/** Creates a cell with a display string and a separate sort value. */
	public CSVCell(String _display, Object _comparable) {
		this(_display, _comparable, false);
	}

	/**
	 * Creates a cell.
	 * @param _display the string shown for this cell
	 * @param _comparable the value used for sorting; supported types are String, Date, Integer, Long, Double, Boolean, and Float
	 * @param _reverseSort true if sorts on this cell should be reversed
	 */
	public CSVCell(String _display, Object _comparable, boolean _reverseSort) {
		display = _display;
		comparable = _comparable;
		reverseSort = _reverseSort;
	}

	@Override
	public int compareTo(CSVCell _o) {
		if (_o == null)
			return 1;
		// Determine the comparison type from whichever side has a non-null sort value
		Object probe = (comparable != null) ? comparable : _o.comparable;
		if (probe instanceof String)
			return NullUtils.compare((String) comparable, (String) _o.comparable, false);
		if (probe instanceof Date)
			return NullUtils.compare((Date) comparable, (Date) _o.comparable, false);
		if (probe instanceof Integer)
			return NullUtils.compare((Integer) comparable, (Integer) _o.comparable, false);
		if (probe instanceof Long)
			return NullUtils.compare((Long) comparable, (Long) _o.comparable, false);
		if (probe instanceof Double)
			return NullUtils.compare((Double) comparable, (Double) _o.comparable, false);
		if (probe instanceof Boolean)
			return NullUtils.compare((Boolean) comparable, (Boolean) _o.comparable, false);
		if (probe instanceof Float)
			return NullUtils.compare((Float) comparable, (Float) _o.comparable, false);
		return 0;
	}

	/**
	 * Wraps each string in a CSVCell.
	 * @param _data the display values; null yields an empty list
	 * @return the list of cells
	 */
	public static List<CSVCell> asList(String... _data) {
		if (_data == null)
			return new ArrayList<CSVCell>(0);
		List<CSVCell> cells = new ArrayList<CSVCell>(_data.length);
		for (String value : _data)
			cells.add(new CSVCell(value));
		return cells;
	}
}

View File

@@ -0,0 +1,132 @@
package com.lanternsoftware.util.csv;
import org.apache.commons.io.IOUtils;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
/**
 * Parses CSV content from files or strings. Supports double-quoted fields,
 * including quoted fields that span multiple lines.
 * NOTE(review): RFC 4180 escaped quotes ("") inside quoted fields are not supported.
 */
public abstract class CSVReader
{
	/**
	 * Loads a CSV file with no header row.
	 * @param _filename the path of the file to load
	 * @return the parsed CSV, or null if the file could not be read
	 */
	public static CSV loadCSVFromFile(String _filename)
	{
		return loadCSVFromFile(_filename, false);
	}

	/**
	 * Loads a CSV file.
	 * NOTE(review): the reader uses the platform default charset — confirm whether UTF-8 should be forced.
	 * @param _filename the path of the file to load
	 * @param _firstRowIsHeader true if the first row contains column names
	 * @return the parsed CSV, or null if the file could not be read
	 */
	public static CSV loadCSVFromFile(String _filename, boolean _firstRowIsHeader)
	{
		FileInputStream is = null;
		try
		{
			int iMaxColumns = 0;
			List<List<String>> listLines = new LinkedList<List<String>>();
			is = new FileInputStream(_filename);
			BufferedReader r = new BufferedReader(new InputStreamReader(is));
			List<String> listHeader = null;
			List<String> listCurLine = new ArrayList<String>();
			String sRemainder = null;
			String sLine = r.readLine();
			while (sLine != null)
			{
				if (sRemainder == null)
					sRemainder = parseLine(sLine, listCurLine);
				else
				{
					// Fix: a quoted field spanning lines must keep the line break that readLine() stripped
					sRemainder = parseLine(sRemainder + "\n" + sLine, listCurLine);
				}
				if (sRemainder == null)
				{
					if (_firstRowIsHeader && (listHeader == null))
						listHeader = listCurLine;
					else
						listLines.add(listCurLine);
					iMaxColumns = Math.max(iMaxColumns, listCurLine.size());
					listCurLine = new ArrayList<String>();
				}
				sLine = r.readLine();
			}
			return new CSV(listHeader, listLines, iMaxColumns);
		}
		catch (Throwable t)
		{
			t.printStackTrace();
			return null;
		}
		finally
		{
			IOUtils.closeQuietly(is);
		}
	}

	/**
	 * Parses CSV text with no header row.
	 * @param _csv the CSV text
	 * @return the parsed CSV
	 */
	public static CSV parseCSV(String _csv)
	{
		return parseCSV(_csv, false);
	}

	/**
	 * Parses CSV text.
	 * @param _csv the CSV text
	 * @param _bFirstRowIsHeader true if the first row contains column names
	 * @return the parsed CSV
	 */
	public static CSV parseCSV(String _csv, boolean _bFirstRowIsHeader)
	{
		_csv = _csv.replace("\r","");
		int iMaxColumns = 0;
		List<List<String>> listLines = new LinkedList<List<String>>();
		List<String> listHeader = null;
		List<String> listCurLine = new ArrayList<String>();
		String sRemainder = null;
		for (String sLine : _csv.split("\n"))
		{
			if (sRemainder == null)
				sRemainder = parseLine(sLine, listCurLine);
			else
			{
				// Fix: a quoted field spanning lines must keep the line break removed by split("\n")
				sRemainder = parseLine(sRemainder + "\n" + sLine, listCurLine);
			}
			if (sRemainder == null)
			{
				if (_bFirstRowIsHeader && (listHeader == null))
					listHeader = listCurLine;
				else
					listLines.add(listCurLine);
				iMaxColumns = Math.max(iMaxColumns, listCurLine.size());
				listCurLine = new ArrayList<String>();
			}
		}
		return new CSV(listHeader, listLines, iMaxColumns);
	}

	/**
	 * Parses one physical line, appending completed fields to _listCurLine.
	 * @param _sLine the line to parse
	 * @param _listCurLine receives the parsed field values
	 * @return the unconsumed remainder (starting at the opening quote) if the line
	 * ends inside a quoted field, otherwise null
	 */
	public static String parseLine(String _sLine, List<String> _listCurLine)
	{
		int i=0;
		while (i < _sLine.length())
		{
			if (_sLine.charAt(i) == '"')
			{
				// Quoted field: ends at the next `",` or at a closing quote at end of line
				int iPos = _sLine.indexOf("\",",i+1);
				if (iPos < 0)
				{
					if (!_sLine.endsWith("\""))
						return _sLine.substring(i);
					else
					{
						_listCurLine.add(_sLine.substring(i+1, _sLine.length()-1));
						return null;
					}
				}
				else
				{
					_listCurLine.add(_sLine.substring(i+1, iPos));
					i = iPos+2;
				}
			}
			else
			{
				// Unquoted field: runs to the next comma or end of line
				int iPos = _sLine.indexOf(",",i);
				if (iPos < 0)
				{
					_listCurLine.add(_sLine.substring(i));
					return null;
				}
				_listCurLine.add(_sLine.substring(i, iPos));
				i = iPos+1;
			}
		}
		return null;
	}
}

View File

@@ -0,0 +1,528 @@
package com.lanternsoftware.util.email;
import java.io.Serializable;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class EmailValidator {
private static final String SPECIAL_CHARS = "\\p{Cntrl}\\(\\)<>@,;:'\\\\\\\"\\.\\[\\]";
private static final String VALID_CHARS = "[^\\s" + SPECIAL_CHARS + "]";
private static final String QUOTED_USER = "(\"[^\"]*\")";
private static final String WORD = "((" + VALID_CHARS + "|')+|" + QUOTED_USER + ")";
private static final String LEGAL_ASCII_REGEX = "^\\p{ASCII}+$";
private static final String EMAIL_REGEX = "^\\s*?(.+)@(.+?)\\s*$";
private static final String IP_DOMAIN_REGEX = "^\\[(.*)\\]$";
private static final String USER_REGEX = "^\\s*" + WORD + "(\\." + WORD + ")*$";
private static final Pattern MATCH_ASCII_PATTERN = Pattern.compile(LEGAL_ASCII_REGEX);
private static final Pattern EMAIL_PATTERN = Pattern.compile(EMAIL_REGEX);
private static final Pattern IP_DOMAIN_PATTERN = Pattern.compile(IP_DOMAIN_REGEX);
private static final Pattern USER_PATTERN = Pattern.compile(USER_REGEX);
private static final String IPV4_REGEX = "^(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})$";
private final RegexValidator ipv4Validator = new RegexValidator(IPV4_REGEX);
private static final String DOMAIN_LABEL_REGEX = "\\p{Alnum}(?>[\\p{Alnum}-]*\\p{Alnum})*";
private static final String TOP_LABEL_REGEX = "\\p{Alpha}{2,}";
private static final String DOMAIN_NAME_REGEX = "^(?:" + DOMAIN_LABEL_REGEX + "\\.)+" + "(" + TOP_LABEL_REGEX + ")$";
/** RegexValidator for matching domains. */
private final RegexValidator domainRegex = new RegexValidator(DOMAIN_NAME_REGEX);
private static final String[] INFRASTRUCTURE_TLDS = new String[] { "arpa", "root" };
private static final String[] GENERIC_TLDS = new String[] { "aero", // air transport industry
"asia", // Pan-Asia/Asia Pacific
"biz", // businesses
"cat", // Catalan linguistic/cultural community
"com", // commercial enterprises
"coop", // cooperative associations
"info", // informational sites
"jobs", // Human Resource managers
"mobi", // mobile products and services
"museum", // museums, surprisingly enough
"name", // individuals' sites
"net", // internet support infrastructure/business
"org", // noncommercial organizations
"pro", // credentialed professionals and entities
"tel", // contact data for businesses and individuals
"travel", // entities in the travel industry
"gov", // United States Government
"edu", // accredited postsecondary US education entities
"mil", // United States Military
"int" // organizations established by international treaty
};
private static final String[] COUNTRY_CODE_TLDS = new String[] { "ac", // Ascension Island
"ad", // Andorra
"ae", // United Arab Emirates
"af", // Afghanistan
"ag", // Antigua and Barbuda
"ai", // Anguilla
"al", // Albania
"am", // Armenia
"an", // Netherlands Antilles
"ao", // Angola
"aq", // Antarctica
"ar", // Argentina
"as", // American Samoa
"at", // Austria
"au", // Australia (includes Ashmore and Cartier Islands and Coral Sea Islands)
"aw", // Aruba
"ax", // Åland
"az", // Azerbaijan
"ba", // Bosnia and Herzegovina
"bb", // Barbados
"bd", // Bangladesh
"be", // Belgium
"bf", // Burkina Faso
"bg", // Bulgaria
"bh", // Bahrain
"bi", // Burundi
"bj", // Benin
"bm", // Bermuda
"bn", // Brunei Darussalam
"bo", // Bolivia
"br", // Brazil
"bs", // Bahamas
"bt", // Bhutan
"bv", // Bouvet Island
"bw", // Botswana
"by", // Belarus
"bz", // Belize
"ca", // Canada
"cc", // Cocos (Keeling) Islands
"cd", // Democratic Republic of the Congo (formerly Zaire)
"cf", // Central African Republic
"cg", // Republic of the Congo
"ch", // Switzerland
"ci", // Côte d'Ivoire
"ck", // Cook Islands
"cl", // Chile
"cm", // Cameroon
"cn", // China, mainland
"co", // Colombia
"cr", // Costa Rica
"cu", // Cuba
"cv", // Cape Verde
"cx", // Christmas Island
"cy", // Cyprus
"cz", // Czech Republic
"de", // Germany
"dj", // Djibouti
"dk", // Denmark
"dm", // Dominica
"do", // Dominican Republic
"dz", // Algeria
"ec", // Ecuador
"ee", // Estonia
"eg", // Egypt
"er", // Eritrea
"es", // Spain
"et", // Ethiopia
"eu", // European Union
"fi", // Finland
"fj", // Fiji
"fk", // Falkland Islands
"fm", // Federated States of Micronesia
"fo", // Faroe Islands
"fr", // France
"ga", // Gabon
"gb", // Great Britain (United Kingdom)
"gd", // Grenada
"ge", // Georgia
"gf", // French Guiana
"gg", // Guernsey
"gh", // Ghana
"gi", // Gibraltar
"gl", // Greenland
"gm", // The Gambia
"gn", // Guinea
"gp", // Guadeloupe
"gq", // Equatorial Guinea
"gr", // Greece
"gs", // South Georgia and the South Sandwich Islands
"gt", // Guatemala
"gu", // Guam
"gw", // Guinea-Bissau
"gy", // Guyana
"hk", // Hong Kong
"hm", // Heard Island and McDonald Islands
"hn", // Honduras
"hr", // Croatia (Hrvatska)
"ht", // Haiti
"hu", // Hungary
"id", // Indonesia
"ie", // Ireland (Éire)
"il", // Israel
"im", // Isle of Man
"in", // India
"io", // British Indian Ocean Territory
"iq", // Iraq
"ir", // Iran
"is", // Iceland
"it", // Italy
"je", // Jersey
"jm", // Jamaica
"jo", // Jordan
"jp", // Japan
"ke", // Kenya
"kg", // Kyrgyzstan
"kh", // Cambodia (Khmer)
"ki", // Kiribati
"km", // Comoros
"kn", // Saint Kitts and Nevis
"kp", // North Korea
"kr", // South Korea
"kw", // Kuwait
"ky", // Cayman Islands
"kz", // Kazakhstan
"la", // Laos (currently being marketed as the official domain for Los Angeles)
"lb", // Lebanon
"lc", // Saint Lucia
"li", // Liechtenstein
"lk", // Sri Lanka
"lr", // Liberia
"ls", // Lesotho
"lt", // Lithuania
"lu", // Luxembourg
"lv", // Latvia
"ly", // Libya
"ma", // Morocco
"mc", // Monaco
"md", // Moldova
"me", // Montenegro
"mg", // Madagascar
"mh", // Marshall Islands
"mk", // Republic of Macedonia
"ml", // Mali
"mm", // Myanmar
"mn", // Mongolia
"mo", // Macau
"mp", // Northern Mariana Islands
"mq", // Martinique
"mr", // Mauritania
"ms", // Montserrat
"mt", // Malta
"mu", // Mauritius
"mv", // Maldives
"mw", // Malawi
"mx", // Mexico
"my", // Malaysia
"mz", // Mozambique
"na", // Namibia
"nc", // New Caledonia
"ne", // Niger
"nf", // Norfolk Island
"ng", // Nigeria
"ni", // Nicaragua
"nl", // Netherlands
"no", // Norway
"np", // Nepal
"nr", // Nauru
"nu", // Niue
"nz", // New Zealand
"om", // Oman
"pa", // Panama
"pe", // Peru
"pf", // French Polynesia With Clipperton Island
"pg", // Papua New Guinea
"ph", // Philippines
"pk", // Pakistan
"pl", // Poland
"pm", // Saint-Pierre and Miquelon
"pn", // Pitcairn Islands
"pr", // Puerto Rico
"ps", // Palestinian territories (PA-controlled West Bank and Gaza Strip)
"pt", // Portugal
"pw", // Palau
"py", // Paraguay
"qa", // Qatar
"re", // Réunion
"ro", // Romania
"rs", // Serbia
"ru", // Russia
"rw", // Rwanda
"sa", // Saudi Arabia
"sb", // Solomon Islands
"sc", // Seychelles
"sd", // Sudan
"se", // Sweden
"sg", // Singapore
"sh", // Saint Helena
"si", // Slovenia
"sj", // Svalbard and Jan Mayen Islands Not in use (Norwegian dependencies; see .no)
"sk", // Slovakia
"sl", // Sierra Leone
"sm", // San Marino
"sn", // Senegal
"so", // Somalia
"sr", // Suriname
"st", // São Tomé and Príncipe
"su", // Soviet Union (deprecated)
"sv", // El Salvador
"sy", // Syria
"sz", // Swaziland
"tc", // Turks and Caicos Islands
"td", // Chad
"tf", // French Southern and Antarctic Lands
"tg", // Togo
"th", // Thailand
"tj", // Tajikistan
"tk", // Tokelau
"tl", // East Timor (deprecated old code)
"tm", // Turkmenistan
"tn", // Tunisia
"to", // Tonga
"tp", // East Timor
"tr", // Turkey
"tt", // Trinidad and Tobago
"tv", // Tuvalu
"tw", // Taiwan, Republic of China
"tz", // Tanzania
"ua", // Ukraine
"ug", // Uganda
"uk", // United Kingdom
"um", // United States Minor Outlying Islands
"us", // United States of America
"uy", // Uruguay
"uz", // Uzbekistan
"va", // Vatican City State
"vc", // Saint Vincent and the Grenadines
"ve", // Venezuela
"vg", // British Virgin Islands
"vi", // U.S. Virgin Islands
"vn", // Vietnam
"vu", // Vanuatu
"wf", // Wallis and Futuna
"ws", // Samoa (formerly Western Samoa)
"ye", // Yemen
"yt", // Mayotte
"yu", // Serbia and Montenegro (originally Yugoslavia)
"za", // South Africa
"zm", // Zambia
"zw", // Zimbabwe
};
// Lookup collections built from the TLD arrays above; country codes use a HashSet since that list is large.
private static final List<String> INFRASTRUCTURE_TLD_LIST = Arrays.asList(INFRASTRUCTURE_TLDS);
private static final List<String> GENERIC_TLD_LIST = Arrays.asList(GENERIC_TLDS);
private static final Set<String> COUNTRY_CODE_TLD_LIST = new HashSet<String>(Arrays.asList(COUNTRY_CODE_TLDS));
// Shared stateless singleton returned by getInstance().
private static final EmailValidator EMAIL_VALIDATOR = new EmailValidator();
/** @return the shared singleton instance of this validator */
public static EmailValidator getInstance()
{
	return EMAIL_VALIDATOR;
}
/** Checks whether the given string is a syntactically valid email address.
 * @param email the candidate address (null is rejected)
 * @return true if the address is legal ASCII, matches the user@domain structure,
 *         does not end with a dot, and has a valid local part and domain */
public boolean isValid(String email)
{
	if (email == null)
		return false;
	if (!MATCH_ASCII_PATTERN.matcher(email).matches())
		return false;
	// Check the whole email address structure
	Matcher emailMatcher = EMAIL_PATTERN.matcher(email);
	if (!emailMatcher.matches() || email.endsWith("."))
		return false;
	return isValidUser(emailMatcher.group(1)) && isValidDomain(emailMatcher.group(2));
}
/** Validates the domain portion of an address: either a bracketed IPv4 literal
 * (e.g. "[127.0.0.1]") or a dotted domain name ending in a known TLD.
 * @param domain the text after the '@'
 * @return true if the domain is valid */
protected boolean isValidDomain(String domain)
{
	Matcher ipDomainMatcher = IP_DOMAIN_PATTERN.matcher(domain);
	return ipDomainMatcher.matches() ? isValidInet4Address(ipDomainMatcher.group(1)) : isValidTldDomain(domain);
}
/** Validates a dotted-quad IPv4 address: four numeric segments, each 0-255.
 * @param inet4Address the bare address (no brackets)
 * @return true if the address is a legal IPv4 literal */
protected boolean isValidInet4Address(String inet4Address)
{
	// The regex guarantees four groups of 1-3 digits; range-check each one here
	String[] segments = ipv4Validator.match(inet4Address);
	if (segments == null)
		return false;
	for (String segment : segments)
	{
		if (segment == null || segment.isEmpty())
			return false;
		try
		{
			if (Integer.parseInt(segment) > 255)
				return false;
		}
		catch (NumberFormatException e)
		{
			return false;
		}
	}
	return true;
}
/** Validates the local (user) portion of an address against USER_PATTERN.
 * @param user the text before the '@'
 * @return true if the local part is legal */
protected boolean isValidUser(String user)
{
	Matcher userMatcher = USER_PATTERN.matcher(user);
	return userMatcher.matches();
}
/** Validates a dotted domain name and verifies its top-level domain is known.
 * @param domain the domain name (not an IP literal)
 * @return true if the name matches the domain grammar and ends in a recognized TLD */
protected boolean isValidTldDomain(String domain)
{
	// Group 0 of the match is the captured top-level label
	String[] groups = domainRegex.match(domain);
	if (groups == null || groups.length == 0)
		return false;
	return isValidTld(groups[0]);
}
/** @return true if the tld (with or without a leading dot) is any recognized top-level domain */
protected boolean isValidTld(String tld)
{
	return isValidInfrastructureTld(tld) || isValidGenericTld(tld) || isValidCountryCodeTld(tld);
}

/** @return true if the tld is an infrastructure TLD (e.g. "arpa") */
protected boolean isValidInfrastructureTld(String iTld)
{
	return INFRASTRUCTURE_TLD_LIST.contains(normalizeTld(iTld));
}

/** @return true if the tld is a generic TLD (e.g. "com", "org") */
protected boolean isValidGenericTld(String gTld)
{
	return GENERIC_TLD_LIST.contains(normalizeTld(gTld));
}

/** @return true if the tld is an ISO country-code TLD (e.g. "us", "de") */
protected boolean isValidCountryCodeTld(String ccTld)
{
	return COUNTRY_CODE_TLD_LIST.contains(normalizeTld(ccTld));
}

/** Lower-cases the tld and strips any leading dot so it can be looked up in the TLD lists.
 * NOTE(review): default-locale toLowerCase() misbehaves under e.g. the Turkish locale
 * ("I" -> dotless i); consider toLowerCase(Locale.ROOT) if a Locale import can be added. */
private String normalizeTld(String _tld)
{
	return chompLeadingDot(_tld.toLowerCase());
}

/** @return the input with a single leading dot removed, if present */
private String chompLeadingDot(String str)
{
	return str.startsWith(".") ? str.substring(1) : str;
}
private class RegexValidator implements Serializable
{
private static final long serialVersionUID = -8832409930574867162L;
private final Pattern[] patterns;
/** Construct a <i>case sensitive</i> validator for a single regular expression.
*
* @param regex
* The regular expression this validator will validate against */
public RegexValidator(String regex)
{
this(regex, true);
}
/** Construct a validator for a single regular expression with the specified case sensitivity.
*
* @param regex
* The regular expression this validator will validate against
* @param caseSensitive
* when <code>true</code> matching is <i>case sensitive</i>, otherwise matching is <i>case in-sensitive</i> */
public RegexValidator(String regex, boolean caseSensitive)
{
this(new String[] { regex }, caseSensitive);
}
/** Construct a validator that matches any one of the set of regular expressions with the specified case sensitivity.
*
* @param regexs
* The set of regular expressions this validator will validate against
* @param caseSensitive
* when <code>true</code> matching is <i>case sensitive</i>, otherwise matching is <i>case in-sensitive</i> */
public RegexValidator(String[] regexs, boolean caseSensitive)
{
if (regexs == null || regexs.length == 0)
{
throw new IllegalArgumentException("Regular expressions are missing");
}
patterns = new Pattern[regexs.length];
int flags = (caseSensitive ? 0 : Pattern.CASE_INSENSITIVE);
for (int i = 0; i < regexs.length; i++)
{
if (regexs[i] == null || regexs[i].length() == 0)
{
throw new IllegalArgumentException("Regular expression[" + i + "] is missing");
}
patterns[i] = Pattern.compile(regexs[i], flags);
}
}
/** Validate a value against the set of regular expressions returning the array of matched groups.
*
* @param value
* The value to validate.
* @return String array of the <i>groups</i> matched if valid or <code>null</code> if invalid */
public String[] match(String value)
{
if (value == null)
{
return null;
}
for (int i = 0; i < patterns.length; i++)
{
Matcher matcher = patterns[i].matcher(value);
if (matcher.matches())
{
int count = matcher.groupCount();
String[] groups = new String[count];
for (int j = 0; j < count; j++)
{
groups[j] = matcher.group(j + 1);
}
return groups;
}
}
return null;
}
/** Provide a String representation of this validator.
*
* @return A String representation of this validator */
public String toString()
{
StringBuffer buffer = new StringBuffer();
buffer.append("RegexValidator{");
for (int i = 0; i < patterns.length; i++)
{
if (i > 0)
{
buffer.append(",");
}
buffer.append(patterns[i].pattern());
}
buffer.append("}");
return buffer.toString();
}
}
}

View File

@@ -0,0 +1,116 @@
package com.lanternsoftware.util.hash;
import java.security.MessageDigest;
import com.lanternsoftware.util.NullUtils;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.CollectionUtils;
/** Base class for the digest-based hash tools.  Wraps a MessageDigest with an optional static
 * salt (prepended or appended depending on the subclass) and a configurable iteration count.
 * All hashing methods are synchronized because MessageDigest instances are not thread-safe. */
public abstract class AbstractHashTool {
	private final Logger LOG = LoggerFactory.getLogger(getClass());
	protected final MessageDigest digest;
	protected final byte[] staticSalt;
	protected final boolean prependSalt;
	protected final int iterations;

	/** @param _algorithm the MessageDigest algorithm name (e.g. "MD5", "SHA-512")
	 * @param _staticSalt a salt mixed into every hash performed by this tool (may be null)
	 * @param _prependSalt when true the salts are prepended to the value, otherwise appended
	 * @param _iterationCount how many times to re-hash the value (clamped to at least 1) */
	public AbstractHashTool(String _algorithm, String _staticSalt, boolean _prependSalt, int _iterationCount) {
		prependSalt = _prependSalt;
		MessageDigest md = null;
		try {
			md = MessageDigest.getInstance(_algorithm);
		}
		catch (Exception e) {
			// NOTE(review): a bad algorithm name leaves digest null and every hash call will NPE later
			LOG.error("Failed to create digest: " + _algorithm, e);
		}
		digest = md;
		staticSalt = NullUtils.toByteArray(_staticSalt);
		iterations = Math.max(1, _iterationCount);
	}

	/**
	 * @param _value a string value to hash using the static salt and algorithm of this hash tool
	 * @return the hex encoded hashed value
	 */
	public synchronized String hashHex(String _value) {
		return hashHex(_value, null);
	}

	/**
	 * @param _value a string value to hash using the static salt and algorithm of this hash tool
	 * @param _salt a salt to use for this hash operation along with the static salt of this hash tool
	 * @return the hex encoded hashed value
	 */
	public synchronized String hashHex(String _value, String _salt) {
		return Hex.encodeHexString(hash(_value, _salt));
	}

	/**
	 * @param _value a string value to hash using the static salt and algorithm of this hash tool
	 * @return the base64 encoded hashed value
	 */
	public synchronized String hash64(String _value) {
		return hash64(_value, null);
	}

	/**
	 * @param _value a string value to hash using the static salt and algorithm of this hash tool
	 * @param _salt a salt to use for this hash operation along with the static salt of this hash tool
	 * @return the base64 encoded hashed value
	 */
	public synchronized String hash64(String _value, String _salt) {
		return Base64.encodeBase64String(hash(_value, _salt));
	}

	/**
	 * @param _value a string value to hash using the static salt and algorithm of this hash tool
	 * @return the hashed value
	 */
	public synchronized byte[] hash(String _value) {
		return hash(_value, null);
	}

	/**
	 * @param _value a string value to hash using the static salt and algorithm of this hash tool
	 * @param _salt a salt to use for this hash operation along with the static salt of this hash tool
	 * @return the hashed value
	 */
	public synchronized byte[] hash(String _value, String _salt) {
		byte[] hashed = NullUtils.toByteArray(_value);
		byte[] dynamicSalt = NullUtils.toByteArray(_salt);
		// The salts are re-applied on every iteration before re-hashing
		for (int i = 0; i < iterations; i++) {
			hashed = hash(salt(hashed, dynamicSalt));
		}
		return hashed;
	}

	/**
	 * @param _value a byte array to hash using the algorithm of this hash tool
	 * @return the base64 encoded hashed value
	 */
	public synchronized String hash64(byte[] _value) {
		return Base64.encodeBase64String(hash(_value));
	}

	/**
	 * @param _value a byte array to digest (no salting, single pass)
	 * @return the hashed value
	 */
	public synchronized byte[] hash(byte[] _value) {
		return digest.digest(_value);
	}

	/**
	 * Combines the value with the static and per-call salts in the order configured for this tool.
	 * @param _value the bytes to salt
	 * @param _salt the per-call salt (may be empty)
	 * @return the salted byte array
	 */
	public synchronized byte[] salt(byte[] _value, byte[] _salt) {
		return prependSalt ? CollectionUtils.merge(staticSalt, _salt, _value) : CollectionUtils.merge(_value, staticSalt, _salt);
	}
}

View File

@@ -0,0 +1,27 @@
package com.lanternsoftware.util.hash;
/** Hash tool using the MD5 digest with the salt appended to the value.
 * NOTE(review): MD5 is cryptographically broken; prefer SHA512HashTool for anything security-sensitive. */
public class MD5HashTool extends AbstractHashTool {
	/**
	 * Creates an MD5 hash tool with no static salt that performs only one hash iteration
	 */
	public MD5HashTool() {
		this(null, 1);
	}
	/**
	 * Creates an MD5 hash tool with the specified static salt that performs only one hash iteration
	 * @param _salt the salt to attach each time a hash is performed with this tool
	 */
	public MD5HashTool(String _salt) {
		this(_salt, 1);
	}
	/**
	 * Creates an MD5 hash tool with the specified static salt that performs the specified number of iterations each time a hash is performed
	 * @param _salt the salt to attach each time a hash is performed with this tool
	 * @param _iterations the number of times to hash values
	 */
	public MD5HashTool(String _salt, int _iterations) {
		// prependSalt = false: MD5 appends the salt, unlike the SHA tools
		super("MD5", _salt, false, _iterations);
	}
}

View File

@@ -0,0 +1,27 @@
package com.lanternsoftware.util.hash;
/** Hash tool using the SHA-1 digest with the salt prepended to the value.
 * NOTE(review): SHA-1 is deprecated for security use; prefer SHA512HashTool where possible. */
public class SHA1HashTool extends AbstractHashTool {
	/**
	 * Creates an SHA-1 hash tool with no static salt that performs only one hash iteration
	 */
	public SHA1HashTool() {
		this(null, 1);
	}
	/**
	 * Creates an SHA-1 hash tool with the specified static salt that performs only one hash iteration
	 * @param _salt the salt to attach each time a hash is performed with this tool
	 */
	public SHA1HashTool(String _salt) {
		this(_salt, 1);
	}
	/**
	 * Creates an SHA-1 hash tool with the specified static salt that performs the specified number of iterations each time a hash is performed
	 * @param _salt the salt to attach each time a hash is performed with this tool
	 * @param _iterations the number of times to hash values
	 */
	public SHA1HashTool(String _salt, int _iterations) {
		// prependSalt = true: the salt is prepended for the SHA tools
		super("SHA-1", _salt, true, _iterations);
	}
}

View File

@@ -0,0 +1,27 @@
package com.lanternsoftware.util.hash;
/** Hash tool using the SHA-512 digest with the salt prepended to the value. */
public class SHA512HashTool extends AbstractHashTool {
	/**
	 * Creates an SHA-512 hash tool with no static salt that performs only one hash iteration
	 */
	public SHA512HashTool() {
		this(null, 1);
	}
	/**
	 * Creates an SHA-512 hash tool with the specified static salt that performs only one hash iteration
	 * @param _salt the salt to attach each time a hash is performed with this tool
	 */
	public SHA512HashTool(String _salt) {
		this(_salt, 1);
	}
	/**
	 * Creates an SHA-512 hash tool with the specified static salt that performs the specified number of iterations each time a hash is performed
	 * @param _salt the salt to attach each time a hash is performed with this tool
	 * @param _iterations the number of times to hash values
	 */
	public SHA512HashTool(String _salt, int _iterations) {
		// prependSalt = true: the salt is prepended for the SHA tools
		super("SHA-512", _salt, true, _iterations);
	}
}

View File

@@ -0,0 +1,10 @@
package com.lanternsoftware.util.tracing;
import java.util.Date;
import java.util.List;
/** Pluggable tracing backend: receives configuration, creates duration trackers for spans,
 * and records completed traces. */
public interface ITracer {
	/** Applies the tracer configuration (endpoint, sampling frequency, etc.). */
	void config(TracerConfig _config);
	/** Creates a duration tracker for a new span.
	 * @param _parent the parent trace context (null presumably means a root span — confirm with implementations)
	 * @param _name the span name
	 * @param _start the wall-clock start time */
	TraceDuration createDuration(TraceContext _parent, String _name, Date _start);
	/** Records a completed trace with its tags and log events. */
	void trace(String _name, TraceDuration _duration, TraceTags _tags, List<TraceLog> _logs);
}

View File

@@ -0,0 +1,32 @@
package com.lanternsoftware.util.tracing;
import java.util.HashMap;
import java.util.Map;
import com.lanternsoftware.util.NullUtils;
/** A propagated trace context: a string-to-string map with a compact "key.value~key.value"
 * wire format.
 * NOTE(review): keys or values containing '.' or '~' do not survive a round trip —
 * deserialize drops any token that does not split into exactly two parts. */
public class TraceContext extends HashMap<String, String> {
	/** @return this context encoded as "key.value" pairs joined by '~' ("" when empty) */
	public String serialize() {
		StringBuilder s = new StringBuilder();
		boolean first = true;
		for (Map.Entry<String, String> e : entrySet()) {
			if (!first)
				s.append("~");
			first = false;
			s.append(e.getKey()).append(".").append(e.getValue());
		}
		return s.toString();
	}
	/** Parses the wire format produced by {@link #serialize()}.
	 * @return the decoded context, or null if no valid pairs were found */
	public static TraceContext deserialize(String _context) {
		TraceContext context = new TraceContext();
		for (String pair : NullUtils.cleanSplit(_context, "~")) {
			String[] parts = NullUtils.cleanSplit(pair, "\\.");
			if (parts.length == 2)
				context.put(parts[0], parts[1]);
		}
		return context.isEmpty() ? null : context;
	}
}

View File

@@ -0,0 +1,39 @@
package com.lanternsoftware.util.tracing;
import java.util.Date;
/** Tracks the cumulative wall-clock duration of a trace span, supporting pause (stop) and
 * resume (start) cycles.  All times are epoch milliseconds. */
public class TraceDuration {
	private long start;
	private long curStart;
	private long duration = 0;

	/** @param _start the wall-clock start of the span */
	public TraceDuration(Date _start) {
		start = curStart = _start.getTime();
	}

	/** Resumes timing; if nothing has been accumulated yet, the overall start is reset too. */
	public void start() {
		curStart = System.currentTimeMillis();
		if (duration == 0)
			start = curStart;
	}

	/** Pauses timing, folding the interval since the last (re)start into the accumulated duration. */
	public void stop() {
		duration += System.currentTimeMillis() - curStart;
	}

	/** @return the instant corresponding to "now" within this span: start plus accumulated
	 *         duration plus the currently-running interval */
	public Date currentTimeOffset() {
		return new Date(start + duration + (System.currentTimeMillis() - curStart));
	}

	/** @return the accumulated duration in milliseconds (excludes any interval still running) */
	public long duration() {
		return duration;
	}

	/** @return the end timestamp implied by the accumulated duration only */
	public long end() {
		return start + duration;
	}

	/** @return the trace context for this span; this base implementation has none */
	public TraceContext getContext() {
		return null;
	}
}

View File

@@ -0,0 +1,8 @@
package com.lanternsoftware.util.tracing;
/** Controls how often traces are actually recorded (configured via TracerConfig.withFrequency). */
public enum TraceFrequencyType {
	/** Record every trace. */
	ALL,
	/** Record a configured percentage of traces. */
	PERCENTAGE,
	/** Record at most a configured number of traces per second. */
	MAX_TRACES_PER_SECOND,
	/** The trace rate is controlled remotely by the tracing backend. */
	REMOTE_CONTROLLED
}

View File

@@ -0,0 +1,21 @@
package com.lanternsoftware.util.tracing;
import java.util.Date;
/** An immutable timestamped log event attached to a trace. */
public class TraceLog {
	private final Date timeStamp;
	private final String event;

	/** @param _timeStamp the time the event occurred (defensively copied; may be null)
	 * @param _event the event description */
	public TraceLog(Date _timeStamp, String _event) {
		// Defensive copy: java.util.Date is mutable, so don't share the caller's instance
		timeStamp = (_timeStamp == null) ? null : new Date(_timeStamp.getTime());
		event = _event;
	}

	/** @return a defensive copy of the event timestamp (null if none was supplied) */
	public Date getTimeStamp() {
		return (timeStamp == null) ? null : new Date(timeStamp.getTime());
	}

	/** @return the event description */
	public String getEvent() {
		return event;
	}
}

View File

@@ -0,0 +1,5 @@
package com.lanternsoftware.util.tracing;
/** A live span within a trace to which tags can be attached. */
public interface TraceSpan {
	/** Attaches or replaces a tag on this span. */
	void setTag(String _name, String _value);
}

View File

@@ -0,0 +1,16 @@
package com.lanternsoftware.util.tracing;
import java.util.HashMap;
/** A fluent string-to-string tag map attached to traces. */
public class TraceTags extends HashMap<String, String> {
	/** Creates a tag map seeded with a single tag.
	 * @param _name the tag name
	 * @param _value the tag value
	 * @return a new tag map containing only the given tag */
	public static TraceTags tag(String _name, String _value) {
		return new TraceTags().withTag(_name, _value);
	}

	/** Adds (or replaces) a tag and returns this map for chaining. */
	public TraceTags withTag(String _name, String _value) {
		put(_name, _value);
		return this;
	}
}

View File

@@ -0,0 +1,77 @@
package com.lanternsoftware.util.tracing;
public class TracerConfig {
private final String appName;
private final String endpoint;
private final boolean suppressLocalLog;
private TraceFrequencyType frequencyType;
private double frequency;
private boolean useThreadContext = false;
public TracerConfig(String _appName, String _endpoint) {
this(_appName, _endpoint, true);
}
public TracerConfig(String _appName, String _endpoint, boolean _suppressLocalLog) {
appName = _appName;
endpoint = _endpoint;
suppressLocalLog = _suppressLocalLog;
frequencyType = TraceFrequencyType.ALL;
frequency = 0.0;
}
public TracerConfig withFrequency(TraceFrequencyType _type, double _frequency) {
frequencyType = _type;
frequency = _frequency;
return this;
}
public TracerConfig tracePercentage(double _percentage) {
return withFrequency(TraceFrequencyType.PERCENTAGE, _percentage);
}
public TracerConfig traceMaximumPerSecond(int _max) {
return withFrequency(TraceFrequencyType.MAX_TRACES_PER_SECOND, _max);
}
public TracerConfig traceRateControlledRemotely() {
return withFrequency(TraceFrequencyType.REMOTE_CONTROLLED, 0.0);
}
public TracerConfig traceAll() {
return withFrequency(TraceFrequencyType.ALL, 1.0);
}
public TracerConfig useThreadContext(boolean _useThreadContext) {
setUseThreadContext(_useThreadContext);
return this;
}
public String getAppName() {
return appName;
}
public String getEndpoint() {
return endpoint;
}
public boolean isSuppressLocalLog() {
return suppressLocalLog;
}
public TraceFrequencyType getFrequencyType() {
return frequencyType;
}
public double getFrequency() {
return frequency;
}
public boolean isUseThreadContext() {
return useThreadContext;
}
public void setUseThreadContext(boolean _useThreadContext) {
useThreadContext = _useThreadContext;
}
}

View File

@@ -0,0 +1,99 @@
package com.lanternsoftware.util.xml;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.NullUtils;
/** A minimal DOM-style XML node: element text content, attributes, and children grouped by
 * element name.  Produced by {@link XmlParser}.  Path arguments are lists of element names
 * walked from this node downward. */
public class XmlNode
{
	// Text content of the element (null until set)
	private String content;
	private final Map<String, String> attributes = new HashMap<String, String>();
	// Children keyed by element name; repeated elements accumulate in the list
	private final Map<String, List<XmlNode>> children = new HashMap<String, List<XmlNode>>();
	/** @return the text content of this element (may be null) */
	public String getContent()
	{
		return content;
	}
	public void setContent(String _content)
	{
		content = _content;
	}
	/** @return the live, mutable attribute map for this element (never null) */
	public Map<String, String> getAttributes()
	{
		return attributes;
	}
	/** @return the live, mutable child map: element name to child nodes (never null) */
	public Map<String, List<XmlNode>> getChildren()
	{
		return children;
	}
	/** Appends a child node under the given element name. */
	public void addChild(String _name, XmlNode _node)
	{
		CollectionUtils.addToMultiMap(_name, _node, children);
	}
	/** Walks the path taking the FIRST child at each step.
	 * @param _path element names from this node down; an empty path returns this node
	 * @return the node at the end of the path, or null if any step is missing */
	public XmlNode getChild(List<String> _path)
	{
		if (CollectionUtils.isEmpty(_path))
			return this;
		XmlNode node = CollectionUtils.getFirst(children.get(CollectionUtils.getFirst(_path)));
		if (node == null)
			return null;
		return node.getChild(_path.subList(1, _path.size()));
	}
	/** Collects ALL nodes reachable along the path (intermediate steps fan out across every
	 * matching child).
	 * NOTE(review): unlike getChild, an empty path presumably yields an empty list here
	 * (children.get(null)) rather than this node — confirm that asymmetry is intended.
	 * @return all matching nodes, never null */
	public List<XmlNode> getChildren(List<String> _path)
	{
		if (CollectionUtils.size(_path) == 1)
			return CollectionUtils.makeNotNull(children.get(_path.get(0)));
		List<XmlNode> nodes = new ArrayList<XmlNode>();
		for (XmlNode node : CollectionUtils.makeNotNull(children.get(CollectionUtils.getFirst(_path))))
		{
			nodes.addAll(node.getChildren(_path.subList(1, _path.size())));
		}
		return nodes;
	}
	/** @return the first node along the path whose attribute equals the given value, or null */
	public XmlNode getChild(List<String> _path, String _attributeName, String _attributeValue)
	{
		for (XmlNode node : getChildren(_path))
		{
			if (NullUtils.isEqual(node.getAttributes().get(_attributeName), _attributeValue))
				return node;
		}
		return null;
	}
	/** @return the content of the first node along the path with the given attribute value, or null */
	public String getChildContent(List<String> _path, String _attributeName, String _attributeValue)
	{
		XmlNode node = getChild(_path, _attributeName, _attributeValue);
		if (node == null)
			return null;
		return node.getContent();
	}
	/** @return the named attribute of the node at the path, or null if the path is missing */
	public String getChildAttribute(List<String> _path, String _attributeName)
	{
		XmlNode child = getChild(_path);
		if (child == null)
			return null;
		return child.getAttributes().get(_attributeName);
	}
	/** @return the content of the node at the path, or null if the path is missing */
	public String getChildContent(List<String> _path)
	{
		XmlNode node = getChild(_path);
		if (node == null)
			return null;
		return node.getContent();
	}
}

View File

@@ -0,0 +1,83 @@
package com.lanternsoftware.util.xml;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Stack;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import com.lanternsoftware.util.NullUtils;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** StAX-based parser that builds an {@link XmlNode} tree from a file or stream. */
public class XmlParser {
	protected static final Logger LOG = LoggerFactory.getLogger(XmlParser.class);

	/** Loads and parses an XML file from disk.
	 * @param _filePath path to the xml file
	 * @return the root node, or null on any failure */
	public static XmlNode loadXmlFile(String _filePath) {
		FileInputStream is = null;
		try {
			is = new FileInputStream(_filePath);
			return parseXml(is);
		}
		catch (Exception _e) {
			LOG.error("Failed to load xml file", _e);
			return null;
		}
		finally {
			IOUtils.closeQuietly(is);
		}
	}

	/** Parses an XML document from a stream.  The stream is always closed.
	 * @param _is the stream to read
	 * @return the root node, or null if parsing fails */
	public static XmlNode parseXml(InputStream _is) {
		XMLStreamReader reader = null;
		try {
			Stack<XmlNode> stack = new Stack<XmlNode>();
			// One text buffer per open element, parallel to the node stack.  Previously a single
			// shared buffer was never reset, so a parent element picked up its last child's text.
			Stack<StringBuilder> content = new Stack<StringBuilder>();
			reader = XMLInputFactory.newInstance().createXMLStreamReader(_is);
			while (reader.hasNext()) {
				switch (reader.next()) {
					case XMLStreamConstants.START_ELEMENT: {
						XmlNode node = new XmlNode();
						for (int i = 0; i < reader.getAttributeCount(); i++) {
							node.getAttributes().put(reader.getAttributeLocalName(i), reader.getAttributeValue(i));
						}
						stack.push(node);
						content.push(new StringBuilder());
						break;
					}
					case XMLStreamConstants.CHARACTERS: {
						// Guard against character events outside any element (previously an NPE)
						if (!content.empty())
							content.peek().append(NullUtils.makeNotNull(reader.getText()));
						break;
					}
					case XMLStreamConstants.END_ELEMENT: {
						XmlNode node = stack.pop();
						// Set content before the root-return so the root's text is no longer dropped
						node.setContent(content.pop().toString().trim());
						if (stack.empty())
							return node;
						stack.peek().addChild(reader.getLocalName(), node);
						break;
					}
				}
			}
		}
		catch (Exception _e) {
			LOG.error("Failed to parse XML", _e);
		}
		finally {
			// reader is null if createXMLStreamReader threw; previously this NPE'd in finally
			if (reader != null) {
				try {
					reader.close();
				}
				catch (XMLStreamException _e) {
					LOG.error("Failed to close XML stream", _e);
				}
			}
			IOUtils.closeQuietly(_is);
		}
		return null;
	}
}

View File

@@ -0,0 +1,60 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.lanternsoftware.util</groupId>
<artifactId>lantern-util-dao-ephemeral</artifactId>
<name>lantern-util-dao-ephemeral</name>
<version>1.0.0</version>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lantern-util-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lantern-util-dao</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.2</version>
<executions>
<execution>
<goals>
<goal>testCompile</goal>
</goals>
<phase>compile</phase>
</execution>
</executions>
<configuration>
<optimize>true</optimize>
<showDeprecation>true</showDeprecation>
<encoding>UTF-8</encoding>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,284 @@
package com.lanternsoftware.util.dao.ephemeral;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;
import com.lanternsoftware.util.dao.AbstractDaoProxy;
import com.lanternsoftware.util.dao.DaoEntity;
import com.lanternsoftware.util.dao.DaoProxyType;
import com.lanternsoftware.util.dao.DaoQuery;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.DaoSort;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.IFilter;
import com.lanternsoftware.util.ITransformer;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.ResourceLoader;
import com.lanternsoftware.util.dao.annotations.PrimaryKey;
public class EphemeralProxy extends AbstractDaoProxy {
private static final Logger LOG = LoggerFactory.getLogger(EphemeralProxy.class);
private final Map<String, Map<String, DaoEntity>> tables = new HashMap<>();
private final Map<String, Class<?>> tableClasses = new HashMap<>();
private final DaoProxyType serializerType;
private long genericSequence = 100;
/** Loads a previously persisted proxy from disk using the default MONGO serializer dialect.
 * @param _path directory containing one json file per persisted collection */
public static EphemeralProxy loadFromDisk(String _path) {
	return loadFromDisk(_path, DaoProxyType.MONGO);
}

/** Loads a previously persisted proxy from disk.  Each file must be named after the canonical
 * class name of the entities it contains, with a ".json" suffix; unknown classes are skipped.
 * @param _path directory containing one json file per persisted collection
 * @param _serializerType the serializer dialect reported by getType()
 * @return the populated proxy (empty if the path is missing or loading fails) */
public static EphemeralProxy loadFromDisk(String _path, DaoProxyType _serializerType) {
	EphemeralProxy proxy = new EphemeralProxy(_serializerType);
	try {
		File file = new File(_path);
		if (file.isDirectory()) {
			File[] files = file.listFiles();
			// listFiles() can return null on an I/O error even for a directory
			if (files != null) {
				for (File child : files) {
					if (!child.getName().endsWith(".json"))
						continue;
					Class<?> clazz;
					try {
						// File name (minus ".json") is the entity's canonical class name
						clazz = Class.forName(child.getName().substring(0, child.getName().length() - 5));
					} catch (ClassNotFoundException _e) {
						continue;
					}
					List<DaoEntity> entities = DaoSerializer.parseList(NullUtils.toString(ResourceLoader.loadFile(child)));
					proxy.save(clazz, entities);
				}
			}
		}
	} catch (Exception _e) {
		// Include the exception (previously dropped) so failures are diagnosable
		LOG.error("Failed to load directory: " + _path, _e);
	}
	return proxy;
}
/** Creates an empty in-memory proxy using the default MONGO serializer dialect. */
public EphemeralProxy() {
	this(DaoProxyType.MONGO);
}
/** Creates an empty in-memory proxy.
 * @param _serializerType the serializer dialect reported by getType() */
public EphemeralProxy(DaoProxyType _serializerType) {
	serializerType = _serializerType;
}
/** Persists every in-memory collection to the given directory, one json file per collection. */
public void writeToDisk(String _path) {
	writeToDisk(_path, null);
}
/** Persists every in-memory collection to the given directory.
 * Files are named after the entity class canonical name plus the optional suffix.
 * @param _path the target directory (created if necessary)
 * @param _fileNameSuffix optional suffix appended to each file name (may be null) */
public void writeToDisk(String _path, String _fileNameSuffix) {
	File file = new File(_path);
	file.mkdirs();
	if (!_path.endsWith(File.separator))
		_path += File.separator;
	for (Entry<String, Map<String, DaoEntity>> e : tables.entrySet()) {
		try {
			String json = DaoSerializer.toJson(e.getValue().values());
			String filename = _path + tableClasses.get(e.getKey()).getCanonicalName();
			if (_fileNameSuffix != null)
				filename += _fileNameSuffix;
			ResourceLoader.writeFile(filename, NullUtils.toByteArray(json));
		}
		catch (Throwable t) {
			// One bad collection shouldn't abort persisting the rest
			LOG.error("Failed to write collection " + e.getKey() + " to disk", t);
		}
	}
}
/** @return the serializer dialect this proxy was constructed with */
@Override
public DaoProxyType getType() {
	return serializerType;
}
/** Returns all entities in the given table matching the query.
 * NOTE(review): _fields, _sort, _offset and _count are accepted but silently ignored by this
 * in-memory implementation — callers relying on projection, sorting or paging will get the
 * full unsorted result set.  Confirm whether that is acceptable for the tests using it. */
@Override
public synchronized List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count) {
	Map<String, DaoEntity> table = tables.get(_tableName);
	if (table == null)
		return new ArrayList<>();
	return CollectionUtils.filter(table.values(), new QueryFilter(_query));
}
/** Applies the given field changes in place to every entity matching the query. */
@Override
public synchronized void update(Class<?> _class, DaoQuery _query, DaoEntity _changes) {
	for (DaoEntity entity : queryForEntities(DaoSerializer.getTableName(_class, getType()), _query)) {
		for (Entry<String, Object> change : _changes.entrySet()) {
			entity.put(change.getKey(), change.getValue());
		}
	}
}
/** Applies the given field changes to the first entity matching the query.
 * @return the updated entity deserialized to the requested class, or null if nothing matched */
@Override
public synchronized <T> T updateOne(Class<T> _class, DaoQuery _query, DaoEntity _changes) {
	DaoEntity entity = CollectionUtils.getFirst(queryForEntities(DaoSerializer.getTableName(_class, getType()), _query));
	// Previously a query with no match caused an NPE here; return null instead
	if (entity == null)
		return null;
	for (Entry<String, Object> change : _changes.entrySet()) {
		entity.put(change.getKey(), change.getValue());
	}
	return DaoSerializer.fromDaoEntity(entity, _class);
}
/** Saves an entity into an arbitrary named collection, keyed by its "id" field. */
@Override
public String saveEntity(String _collection, DaoEntity _entity) {
	return saveEntity(_collection, CollectionUtils.asArrayList("id"), _entity);
}
/** Saves an entity into the table mapped to the given class, keyed by its @PrimaryKey fields.
 * Also records the class so writeToDisk can name the collection's file. */
@Override
public synchronized String saveEntity(Class<?> _class, DaoEntity _entity) {
	String tableName = DaoSerializer.getTableName(_class, getType());
	tableClasses.put(tableName, _class);
	return saveEntity(tableName, DaoSerializer.getFieldsByAnnotation(_class, PrimaryKey.class), _entity);
}
/**
 * Inserts or replaces an entity in the named in-memory table and returns its key.
 * <p>
 * When the entity carries no "_id", every primary-key field is inspected: empty strings
 * get a random UUID, and zero-valued Long/Integer keys get the next in-memory sequence
 * value. The resulting key is the comma-separated string form of all primary-key values.
 * When "_id" is present, it is used directly as the key.
 *
 * @return the key the entity was stored under, or null when {@code _entity} is null.
 */
private String saveEntity(String _tableName, List<String> _primaryKeys, DaoEntity _entity) {
	if (_entity == null)
		return null;
	String pk;
	if (!_entity.containsKey("_id")) {
		// Ensure every primary-key field has a usable value, generating one where missing or zero.
		for (String key : CollectionUtils.makeNotNull(_primaryKeys)) {
			Object value = _entity.remove(key);
			if ((value instanceof String) || (value == null)) {
				if (NullUtils.isEmpty((String) value)) {
					value = UUID.randomUUID().toString();
				}
			} else if (value instanceof Long) {
				if (((Long) value) == 0) {
					value = getNextSequence();
				}
			} else if (value instanceof Integer) {
				if (((Integer) value) == 0) {
					value = (int) getNextSequence(); // was deprecated `new Long(...).intValue()`
				}
			}
			_entity.put(key, value);
		}
		pk = CollectionUtils.commaSeparated(CollectionUtils.transform(CollectionUtils.getAll(_entity, _primaryKeys), _o -> DaoSerializer.toString(_o)));
	}
	else
		pk = DaoSerializer.getString(_entity, "_id");
	// put() replaces any existing mapping, so the previous explicit remove() was redundant.
	tables.computeIfAbsent(_tableName, k -> new HashMap<>()).put(pk, _entity);
	return pk;
}
// Post-increment of the shared in-memory counter used to mint numeric primary keys.
private long getNextSequence() {
	return genericSequence++;
}
/**
 * Removes every entity matching {@code _query} from the named table.
 * Always reports success, even when the table does not exist.
 */
@Override
public synchronized boolean delete(String _tableName, DaoQuery _query) {
	Map<String, DaoEntity> table = tables.get(_tableName);
	if (table != null) {
		IFilter<DaoEntity> match = new QueryFilter(_query);
		table.values().removeIf(match::isFiltered);
	}
	return true;
}
@Override
public int count(String _tableName, DaoQuery _query) {
	// Counts by materializing the full match list; acceptable for in-memory data sets.
	return queryForEntities(_tableName, _query).size();
}
/**
 * Matches DaoEntities against a DaoQuery, emulating the subset of Mongo-style
 * operators used by this codebase: $ne, $gt/$lt/$gte/$lte, $contains/$startsWith
 * (plus IgnoreCase variants), $in/$nin, and the "$null"/"$notnull" string sentinels.
 */
private class QueryFilter implements IFilter<DaoEntity> {
	private final DaoQuery query;
	QueryFilter(DaoQuery _query) {
		query = _query;
	}
	/**
	 * Returns true when the entity MATCHES the query (every qualifier passes).
	 * Note the inverted sense relative to the method name: "isFiltered" means "is kept".
	 * A null query matches everything.
	 */
	@Override
	public boolean isFiltered(DaoEntity _daoEntity) {
		if (query == null)
			return true;
		for (Entry<String, Object> qual : query.entrySet()) {
			if (qual.getValue() instanceof DaoQuery) {
				// Nested operator query: every recognized operator present must pass;
				// unrecognized operator keys are silently ignored.
				DaoQuery child = (DaoQuery) qual.getValue();
				Object comp = child.get("$ne");
				if ((comp != null) && DaoSerializer.compare(_daoEntity, qual.getKey(), comp) == 0)
					return false;
				comp = child.get("$gt");
				if ((comp != null) && DaoSerializer.compare(_daoEntity, qual.getKey(), comp) <= 0)
					return false;
				comp = child.get("$lt");
				if ((comp != null) && DaoSerializer.compare(_daoEntity, qual.getKey(), comp) >= 0)
					return false;
				comp = child.get("$gte");
				if ((comp != null) && DaoSerializer.compare(_daoEntity, qual.getKey(), comp) < 0)
					return false;
				comp = child.get("$lte");
				if ((comp != null) && DaoSerializer.compare(_daoEntity, qual.getKey(), comp) > 0)
					return false;
				comp = child.get("$contains");
				if ((comp != null) && !DaoSerializer.getString(_daoEntity, qual.getKey()).contains((String) comp))
					return false;
				comp = child.get("$startsWith");
				if ((comp != null) && !DaoSerializer.getString(_daoEntity, qual.getKey()).startsWith((String) comp))
					return false;
				comp = child.get("$containsIgnoreCase");
				if ((comp != null) && !DaoSerializer.getString(_daoEntity, qual.getKey()).toLowerCase().contains(((String) comp).toLowerCase()))
					return false;
				// NOTE(review): "$equalssIgnoreCase" (double s) looks like a typo, but the key must
				// match whatever the query builders emit — verify against callers before renaming.
				comp = child.get("$equalssIgnoreCase");
				if ((comp != null) && !DaoSerializer.getString(_daoEntity, qual.getKey()).toLowerCase().equals(((String) comp).toLowerCase()))
					return false;
				comp = child.get("$startsWithIgnoreCase");
				if ((comp != null) && !DaoSerializer.getString(_daoEntity, qual.getKey()).toLowerCase().startsWith(((String) comp).toLowerCase()))
					return false;
				comp = child.get("$in");
				if ((comp != null) && !in(_daoEntity.get(qual.getKey()), (Collection) comp))
					return false;
				comp = child.get("$nin");
				if ((comp != null) && in(_daoEntity.get(qual.getKey()), (Collection) comp))
					return false;
			}
			// The literal string values "$null" / "$notnull" are presence/absence tests.
			else if ((qual.getValue() instanceof String) && NullUtils.isEqual(qual.getValue(), "$null")) {
				if (_daoEntity.get(qual.getKey()) != null)
					return false;
			}
			else if ((qual.getValue() instanceof String) && NullUtils.isEqual(qual.getValue(), "$notnull")) {
				if (_daoEntity.get(qual.getKey()) == null)
					return false;
			}
			// Plain value: direct equality comparison.
			else if (DaoSerializer.compare(_daoEntity, qual.getKey(), qual.getValue()) != 0)
				return false;
		}
		return true;
	}
}
/**
 * True when the field value (or, for a collection-valued field, any of its
 * elements) equals any candidate in the qualifier collection.
 */
private boolean in(Object field, Collection qual) {
	if (field instanceof Collection) {
		for (Object element : (Collection) field) {
			for (Object candidate : qual) {
				if (NullUtils.isEqual(element, candidate))
					return true;
			}
		}
		return false;
	}
	for (Object candidate : qual) {
		if (NullUtils.isEqual(field, candidate))
			return true;
	}
	return false;
}
}

View File

@@ -0,0 +1,65 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.lanternsoftware.util</groupId>
<artifactId>lantern-util-dao-mongo</artifactId>
<name>lantern-util-dao-mongo</name>
<version>1.0.0</version>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lantern-util-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lantern-util-dao</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver</artifactId>
<version>3.12.5</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.2</version>
<executions>
<execution>
<goals>
<goal>testCompile</goal>
</goals>
<phase>compile</phase>
</execution>
</executions>
<configuration>
<optimize>true</optimize>
<showDeprecation>true</showDeprecation>
<encoding>UTF-8</encoding>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,108 @@
package com.lanternsoftware.util.dao.mongo;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Collection;
import org.apache.commons.io.IOUtils;
import org.bson.BsonBinaryReader;
import org.bson.BsonBinaryWriter;
import org.bson.Document;
import org.bson.codecs.DecoderContext;
import org.bson.codecs.DocumentCodec;
import org.bson.codecs.EncoderContext;
import org.bson.io.BasicOutputBuffer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.CollectionUtils;
/**
 * Helpers for converting between BSON Documents, JSON strings, and raw BSON byte arrays.
 * All methods are defensive: failures are logged and surfaced as null rather than thrown.
 */
public class BsonUtils {
	private static final Logger LOG = LoggerFactory.getLogger(BsonUtils.class);

	/** Parses a JSON string into a Document; returns null (and logs) on invalid input. */
	public static Document parse(String _json) {
		try {
			return Document.parse(_json);
		} catch (Exception _e) {
			LOG.error("Failed to parse json", _e);
			return null;
		}
	}

	/** Renders a Document as JSON; returns null for a null document or on failure. */
	public static String toJson(Document _d) {
		if (_d == null)
			return null;
		try {
			return _d.toJson();
		} catch (Exception _e) {
			LOG.error("Failed to convert bson document to json", _e);
			return null;
		}
	}

	/**
	 * Renders a collection of Documents as a JSON array.
	 * Returns the empty string (not "[]") for a null or empty collection.
	 */
	public static String toJson(Collection<Document> _collDocs) {
		if (CollectionUtils.isEmpty(_collDocs))
			return "";
		StringBuilder json = new StringBuilder("[");
		boolean first = true;
		for (Document doc : _collDocs) {
			if (!first)
				json.append(",");
			first = false;
			json.append(toJson(doc));
		}
		return json.append("]").toString();
	}

	/** Encodes a Document to raw BSON bytes; returns null (and logs) on failure. */
	public static byte[] toByteArray(Document _d) {
		BsonBinaryWriter writer = null;
		try {
			BasicOutputBuffer buffer = new BasicOutputBuffer();
			writer = new BsonBinaryWriter(buffer);
			new DocumentCodec().encode(writer, _d, EncoderContext.builder().build());
			return buffer.toByteArray();
		} catch (Throwable _t) {
			LOG.error("Failed to convert bson document to a byte array", _t);
			return null;
		} finally {
			IOUtils.closeQuietly(writer);
		}
	}

	/** Decodes raw BSON bytes back into a Document; null input or failure yields null. */
	public static Document fromByteArray(byte[] _data) {
		if (_data == null)
			return null;
		BsonBinaryReader reader = null;
		try {
			reader = new BsonBinaryReader(ByteBuffer.wrap(_data).order(ByteOrder.LITTLE_ENDIAN));
			return new DocumentCodec().decode(reader, DecoderContext.builder().build());
		} catch (Throwable _t) {
			LOG.error("Failed to convert byte array into bson document", _t);
			return null;
		} finally {
			IOUtils.closeQuietly(reader);
		}
	}
}

View File

@@ -0,0 +1,149 @@
package com.lanternsoftware.util.dao.mongo;
import java.util.Collections;
import java.util.List;
import com.lanternsoftware.util.dao.DaoEntity;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
import org.apache.commons.codec.binary.Base64;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.ResourceLoader;
import com.lanternsoftware.util.cryptography.AESTool;
/**
 * Connection settings for a MongoDB cluster: hosts, credentials, optional client/CA
 * keystores for SSL, database names. Supports AES-encrypted round-trips to disk or to
 * a Base64 string so credentials are not stored in plain text.
 */
@DBSerializable
public class MongoConfig {
	// NOTE(review): hard-coded AES key material compiled into the jar — this obfuscates the
	// config at rest but is not secret from anyone holding the binary; consider externalizing.
	private static final AESTool aes = new AESTool(4501188070455102914L,4127218394209583290L,8065326024699768144L,6272281743831953728L);
	private List<String> hosts;
	private String username;
	private String password;
	private String clientKeystorePath;
	private String clientKeystorePassword;
	private String caKeystorePath;
	private String caKeystorePassword;
	private String databaseName;
	private String authenticationDatabase;
	public MongoConfig() {
	}
	// Convenience constructor for a single-host, non-SSL configuration.
	public MongoConfig(String _host, String _username, String _password, String _databaseName) {
		this(Collections.singletonList(_host), _username, _password, null, null, null, null, _databaseName);
	}
	public MongoConfig(List<String> _hosts, String _username, String _password, String _clientKeystorePath, String _clientKeystorePassword, String _caKeystorePath, String _caKeystorePassword, String _databaseName) {
		hosts = _hosts;
		username = _username;
		password = _password;
		clientKeystorePath = _clientKeystorePath;
		clientKeystorePassword = _clientKeystorePassword;
		caKeystorePath = _caKeystorePath;
		caKeystorePassword = _caKeystorePassword;
		databaseName = _databaseName;
	}
	public static AESTool getAes() {
		return aes;
	}
	public List<String> getHosts() {
		return hosts;
	}
	public void setHosts(List<String> _hosts) {
		hosts = _hosts;
	}
	public String getUsername() {
		return username;
	}
	public void setUsername(String _username) {
		username = _username;
	}
	public String getPassword() {
		return password;
	}
	public void setPassword(String _password) {
		password = _password;
	}
	public String getClientKeystorePath() {
		return clientKeystorePath;
	}
	public void setClientKeystorePath(String _clientKeystorePath) {
		clientKeystorePath = _clientKeystorePath;
	}
	public String getClientKeystorePassword() {
		return clientKeystorePassword;
	}
	public void setClientKeystorePassword(String _clientKeystorePassword) {
		clientKeystorePassword = _clientKeystorePassword;
	}
	public String getCaKeystorePath() {
		return caKeystorePath;
	}
	public void setCaKeystorePath(String _caKeystorePath) {
		caKeystorePath = _caKeystorePath;
	}
	public String getCaKeystorePassword() {
		return caKeystorePassword;
	}
	public void setCaKeystorePassword(String _caKeystorePassword) {
		caKeystorePassword = _caKeystorePassword;
	}
	public String getDatabaseName() {
		return databaseName;
	}
	public void setDatabaseName(String _databaseName) {
		databaseName = _databaseName;
	}
	public String getAuthenticationDatabase() {
		return authenticationDatabase;
	}
	public void setAuthenticationDatabase(String _authenticationDatabase) {
		authenticationDatabase = _authenticationDatabase;
	}
	// Writes the AES-encrypted BSON form of this config to the given file path.
	public void saveToDisk(String _filePath) {
		ResourceLoader.writeFile(_filePath, encrypt());
	}
	// Serializes to a DaoEntity, encodes as BSON bytes, then AES-encrypts.
	public byte[] encrypt() {
		return aes.encrypt(BsonUtils.toByteArray(DaoSerializer.toDaoEntity(this).toDocument()));
	}
	public String encryptToString() {
		return Base64.encodeBase64String(encrypt());
	}
	// Reads and decrypts a config previously written by saveToDisk; null on missing/empty file.
	public static MongoConfig fromDisk(String _path) {
		return decrypt(ResourceLoader.loadFile(_path));
	}
	public static MongoConfig decrypt(byte[] _configData) {
		if ((_configData == null) || (_configData.length == 0))
			return null;
		return DaoSerializer.fromDaoEntity(new DaoEntity(BsonUtils.fromByteArray(aes.decrypt(_configData))), MongoConfig.class);
	}
	public static MongoConfig decryptFromString(String _config) {
		if (NullUtils.isEmpty(_config))
			return null;
		return decrypt(Base64.decodeBase64(_config));
	}
}

View File

@@ -0,0 +1,464 @@
package com.lanternsoftware.util.dao.mongo;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import com.lanternsoftware.util.dao.AbstractDaoProxy;
import com.lanternsoftware.util.dao.DaoEntity;
import com.lanternsoftware.util.dao.DaoProxyType;
import com.lanternsoftware.util.dao.DaoQuery;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.DaoSort;
import com.lanternsoftware.util.dao.DaoSortField;
import com.lanternsoftware.util.dao.annotations.PrimaryKey;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.model.DeleteOneModel;
import com.mongodb.client.model.ReplaceOptions;
import org.bson.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.ITransformer;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.cryptography.RSAUtils;
import com.lanternsoftware.util.hash.MD5HashTool;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.WriteModel;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
/**
 * IDaoProxy backed by a MongoDB cluster. Translates the framework's DaoQuery/DaoSort
 * abstractions into MongoDB driver calls, maps each class's {@code @PrimaryKey} field
 * onto Mongo's "_id", and batches bulk writes in chunks of 5000.
 */
public class MongoProxy extends AbstractDaoProxy {
	private static final Logger LOG = LoggerFactory.getLogger(MongoProxy.class);
	private final MongoClient client;
	private final String dbName;
	// Lazily-built cache: collection name -> fields covered by a text index (see isTextIndex).
	// NOTE(review): populated without synchronization — confirm single-threaded warm-up.
	private final Map<String, Set<String>> textIndexes = new HashMap<>();
	// Used to shorten over-long index names in ensureIndex.
	private final MD5HashTool hash = new MD5HashTool();
	public MongoProxy(MongoConfig _config) {
		this(_config.getHosts(), _config.getUsername(), _config.getPassword(), _config.getClientKeystorePath(), _config.getClientKeystorePassword(), _config.getCaKeystorePath(), _config.getCaKeystorePassword(), _config.getDatabaseName(), _config.getAuthenticationDatabase());
	}
	public MongoProxy(List<String> _hosts, String _userName, String _password, String _clientKeystorePath, String _clientKeystorePassword, String _caKeystorePath, String _caKeystorePassword, String _dbName) {
		this(_hosts, _userName, _password, _clientKeystorePath, _clientKeystorePassword, _caKeystorePath, _caKeystorePassword, _dbName, null);
	}
	/**
	 * Builds the MongoClient. Hosts may be "host" (port defaults to 27017) or "host:port".
	 * If either keystore path is supplied, an SSLContext is assembled from the client keystore
	 * (key material) and CA keystore (trust material); the literal CA path "aws" loads the
	 * bundled /ca.jks classpath resource instead of a file. Credentials authenticate against
	 * the given auth database, defaulting to "admin".
	 */
	public MongoProxy(List<String> _hosts, String _userName, String _password, String _clientKeystorePath, String _clientKeystorePassword, String _caKeystorePath, String _caKeystorePassword, String _dbName, String _authDbName) {
		List<ServerAddress> listAddresses = new LinkedList<>();
		for (String addr : _hosts) {
			int portIdx = addr.indexOf(":");
			if (portIdx > 0)
				listAddresses.add(new ServerAddress(addr.substring(0, portIdx), DaoSerializer.toInteger(addr.substring(portIdx + 1))));
			else
				listAddresses.add(new ServerAddress(addr, 27017));
		}
		MongoClientOptions options;
		if (NullUtils.isEmpty(_clientKeystorePath) && NullUtils.isEmpty(_caKeystorePath)) {
			options = MongoClientOptions.builder().sslEnabled(false).build();
		}
		else {
			try {
				KeyManager[] keyManagers = null;
				if (NullUtils.isNotEmpty(_clientKeystorePath)) {
					KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
					kmf.init(RSAUtils.loadKeystore(_clientKeystorePath, _clientKeystorePassword), _clientKeystorePassword.toCharArray());
					keyManagers = kmf.getKeyManagers();
				}
				TrustManager[] trustManagers = null;
				if (NullUtils.isNotEmpty(_caKeystorePath)) {
					TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
					if (NullUtils.isEqual(_caKeystorePath, "aws"))
						tmf.init(RSAUtils.loadKeystore(getClass().getResourceAsStream("/ca.jks"), _caKeystorePassword));
					else
						tmf.init(RSAUtils.loadKeystore(_caKeystorePath, _caKeystorePassword));
					trustManagers = tmf.getTrustManagers();
				}
				SSLContext sslContext = SSLContext.getInstance("SSL");
				sslContext.init(keyManagers, trustManagers, null);
				options = MongoClientOptions.builder().sslEnabled(true).sslContext(sslContext).build();
			}
			catch (Exception _e) {
				// NOTE(review): on keystore failure this silently downgrades to a non-SSL
				// connection — confirm that is the intended security posture.
				LOG.error("Failed to load keystores for MongoClient", _e);
				options = MongoClientOptions.builder().sslEnabled(false).build();
			}
		}
		client = new MongoClient(listAddresses, MongoCredential.createCredential(_userName, NullUtils.isNotEmpty(_authDbName) ? _authDbName : "admin", _password.toCharArray()), options);
		dbName = _dbName;
	}
	@Override
	public DaoProxyType getType() {
		return DaoProxyType.MONGO;
	}
	@Override
	public void shutdown() {
		client.close();
	}
	@Override
	public <T> List<T> query(final Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _first, int _count) {
		// Uses the class's first @PrimaryKey field so queries on it are rewritten to "_id".
		return toObjects(queryForEntities(DaoSerializer.getTableName(_class, getType()), CollectionUtils.getFirst(DaoSerializer.getFieldsByAnnotation(_class, PrimaryKey.class)), _query, _fields, _sort, _first, _count), _class);
	}
	@Override
	public List<String> queryForField(Class<?> _class, DaoQuery _query, final String _field) {
		// Projects only the requested field and extracts its string value from each hit.
		String pk = CollectionUtils.getFirst(DaoSerializer.getFieldsByAnnotation(_class, PrimaryKey.class));
		return CollectionUtils.transform(queryForEntities(DaoSerializer.getTableName(_class, getType()), pk, _query, Collections.singletonList(_field), null, 0, -1), new ITransformer<DaoEntity, String>() {
			@Override
			public String transform(DaoEntity _daoEntity) {
				return DaoSerializer.getString(_daoEntity, _field);
			}
		});
	}
	@Override
	public List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count) {
		return queryForEntities(_tableName, null, _query, _fields, _sort, _offset, _count);
	}
	/**
	 * Core query path. Rewrites references to the primary-key field (or "_id" when
	 * {@code _primaryKey} is empty) into "_id", and translates $contains operators:
	 * into a Mongo $text search when the field has a text index, otherwise into a
	 * $regex (with "i" option for the IgnoreCase variant). Applies projection, sort,
	 * offset and limit when provided; {@code _count <= 0} means no limit.
	 */
	public List<DaoEntity> queryForEntities(String _tableName, final String _primaryKey, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count) {
		final String pk = NullUtils.isEmpty(_primaryKey) ? "_id" : _primaryKey;
		FindIterable<Document> iter;
		if (_query != null) {
			DaoQuery query = new DaoQuery();
			for (Entry<String, Object> entry : _query.entrySet()) {
				if (NullUtils.isEqual(entry.getKey(), pk))
					query.put("_id", entry.getValue());
				else {
					boolean keyChanged = false;
					if (entry.getValue() instanceof DaoQuery) {
						DaoQuery child = (DaoQuery) entry.getValue();
						Map<String, Object> newChildren = new HashMap<>();
						Iterator<Entry<String, Object>> entryIter = child.entrySet().iterator();
						while (entryIter.hasNext()) {
							Entry<String, Object> childEntry = entryIter.next();
							if (childEntry.getKey().startsWith("$contains")) {
								boolean caseSensitive = !NullUtils.isEqual(childEntry.getKey(), "$containsIgnoreCase");
								if (isTextIndex(_tableName, entry.getKey())) {
									// Field has a text index: use a server-side $text search instead.
									DaoQuery value = new DaoQuery();
									value.put("$search", childEntry.getValue());
									value.put("$caseSensitive", caseSensitive);
									value.put("$diacriticSensitive", caseSensitive);
									query.put("$text", value);
									entryIter.remove();
									keyChanged = true;
									break;
								}
								else {
									// No text index: fall back to a regex match.
									if (childEntry.getValue() instanceof String) {
										newChildren.put("$regex", childEntry.getValue());
										if (!caseSensitive)
											newChildren.put("$options", "i");
									}
									entryIter.remove();
								}
							}
						}
						child.putAll(newChildren);
					}
					if (!keyChanged)
						query.put(entry.getKey(), entry.getValue());
				}
			}
			query = prepareQuery(query);
			iter = db().getCollection(_tableName).find(query);
		}
		else
			iter = db().getCollection(_tableName).find();
		if (_fields != null) {
			// Rewrite a projection on the primary-key field to "_id".
			List<String> fields = new ArrayList<>();
			for (String field : _fields) {
				if (NullUtils.isEqual(field, pk))
					fields.add("_id");
				else
					fields.add(field);
			}
			_fields = fields;
		}
		Document projection = toProjection(_fields);
		if (projection != null)
			iter.projection(projection);
		Document sort = toSort(_sort);
		if (sort != null)
			iter.sort(sort);
		if (_offset > 0)
			iter.skip(_offset);
		if (_count > 0)
			iter.limit(_count);
		return CollectionUtils.transform(iter, new ITransformer<Document, DaoEntity>() {
			@Override
			public DaoEntity transform(Document _document) {
				return new DaoEntity(_document);
			}
		});
	}
	@Override
	public void update(Class<?> _class, DaoQuery _query, DaoEntity _changes) {
		DaoQuery query = prepareQuery(_query);
		coll(_class).updateMany(query, _changes.toDocument());
	}
	@Override
	public <T> T updateOne(Class<T> _class, DaoQuery _query, DaoEntity _changes) {
		// NOTE(review): unlike update(), the query is NOT passed through prepareQuery here — confirm.
		return DaoSerializer.fromDaoEntity(new DaoEntity(coll(_class).findOneAndUpdate(_query, _changes.toDocument())), _class);
	}
	/**
	 * Upserts one entity. If "_id" is missing, the value of the class's primary-key field
	 * (or a fresh UUID) becomes the "_id". Returns the id on acknowledged writes, else null.
	 */
	@Override
	public String saveEntity(Class<?> _class, DaoEntity _entity) {
		if (_entity == null)
			return null;
		String id = DaoSerializer.getString(_entity, "_id");
		if (NullUtils.isEmpty(id)) {
			String primaryKeyField = CollectionUtils.getFirst(DaoSerializer.getFieldsByAnnotation(_class, PrimaryKey.class));
			if (NullUtils.isEmpty(primaryKeyField)) {
				primaryKeyField = "_id";
			}
			id = (String) _entity.remove(primaryKeyField);
			if (NullUtils.isEmpty(id))
				id = UUID.randomUUID().toString();
			_entity.put("_id", id);
		}
		Document doc = _entity.toDocument();
		UpdateResult result = coll(_class).replaceOne(new Document("_id", id), doc, new ReplaceOptions().upsert(true));
		if (result.wasAcknowledged())
			return id;
		return null;
	}
	// Upsert by collection name; generates a UUID "_id" when the entity has none.
	public String saveEntity(String _tableName, DaoEntity _entity) {
		String id = DaoSerializer.getString(_entity, "_id");
		if (NullUtils.isEmpty(id)) {
			id = UUID.randomUUID().toString();
			_entity.put("_id", id);
		}
		Document doc = _entity.toDocument();
		UpdateResult result = db().getCollection(_tableName).replaceOne(new Document("_id", id), doc, new ReplaceOptions().upsert(true));
		if (result.wasAcknowledged())
			return id;
		return null;
	}
	/**
	 * Bulk-upserts a mixed collection of objects, grouping them by concrete class and
	 * writing in batches of 5000. Returns a map of assigned id -> object.
	 */
	@Override
	public <T> Map<String, T> save(Collection<T> _objects) {
		if (CollectionUtils.isEmpty(_objects))
			return new HashMap<>();
		// Drop nulls before grouping.
		Iterator<T> iter = _objects.iterator();
		while (iter.hasNext()) {
			T t = iter.next();
			if (t == null)
				iter.remove();
		}
		T t = CollectionUtils.getFirst(_objects);
		if (t == null)
			return new HashMap<>();
		Map<Class<?>, List<T>> classes = CollectionUtils.transformToMultiMap(_objects, new ITransformer<T, Class<?>>() {
			@Override
			public Class<?> transform(T _t) {
				return _t.getClass();
			}
		});
		final Map<String, T> ids = new HashMap<>();
		for (Entry<Class<?>, List<T>> e : classes.entrySet()) {
			String primaryKeyField = CollectionUtils.getFirst(DaoSerializer.getFieldsByAnnotation(e.getKey(), PrimaryKey.class));
			if (NullUtils.isEmpty(primaryKeyField)) {
				primaryKeyField = "_id";
			}
			final String pk = primaryKeyField;
			for (Collection<T> entities : CollectionUtils.split(e.getValue(), 5000)) {
				List<WriteModel<Document>> updates = CollectionUtils.transform(entities, (_t) -> {
					DaoEntity entity = DaoSerializer.toDaoEntity(_t, getType());
					if (entity == null)
						return null;
					String id = DaoSerializer.getString(entity, "_id");
					if (NullUtils.isEmpty(id)) {
						id = (String) entity.remove(pk);
						if (NullUtils.isEmpty(id))
							id = UUID.randomUUID().toString();
						entity.put("_id", id);
					}
					ids.put(id, _t);
					return new ReplaceOneModel<>(new Document("_id", id), entity.toDocument(), new ReplaceOptions().upsert(true));
				}, true);
				if (!updates.isEmpty())
					coll(e.getKey()).bulkWrite(updates);
			}
		}
		return ids;
	}
	/**
	 * Bulk-upserts pre-serialized entities for a single class, batching by 5000.
	 * Returns a map of assigned id -> entity. Mutates each entity (moves pk to "_id").
	 */
	@Override
	public Map<String, DaoEntity> save(Class<?> _class, Collection<DaoEntity> _entities) {
		String primaryKeyField = CollectionUtils.getFirst(DaoSerializer.getFieldsByAnnotation(_class, PrimaryKey.class));
		if (NullUtils.isEmpty(primaryKeyField)) {
			primaryKeyField = "_id";
		}
		final Map<String, DaoEntity> ids = new HashMap<>();
		final String pk = primaryKeyField;
		List<DaoEntity> entities = (_entities instanceof List) ? (List<DaoEntity>) _entities : new ArrayList<>(_entities);
		for (Collection<DaoEntity> curEntities : CollectionUtils.split(entities, 5000)) {
			List<WriteModel<Document>> updates = CollectionUtils.transform(curEntities, (_t) -> {
				String id = (String) _t.remove(pk);
				if (NullUtils.isEmpty(id))
					id = UUID.randomUUID().toString();
				_t.put("_id", id);
				ids.put(id, _t);
				return new ReplaceOneModel<>(new Document("_id", id), _t.toDocument(), new ReplaceOptions().upsert(true));
			}, true);
			if (!updates.isEmpty())
				coll(_class).bulkWrite(updates);
		}
		return ids;
	}
	// Atomically fetches and deletes the first match; null when nothing matched.
	public <T> T queryOneAndDelete(final Class<T> _class, DaoQuery _query) {
		Document doc = coll(_class).findOneAndDelete(_query);
		if (doc == null)
			return null;
		return toObject(new DaoEntity(doc), _class);
	}
	// Deletes by explicit id list; returns the acknowledged delete count (0 otherwise).
	public int deleteById(Class<?> _class, List<String> _ids) {
		BulkWriteResult result = coll(_class).bulkWrite(CollectionUtils.transform(_ids, _t -> new DeleteOneModel<>(new Document("_id", _t))));
		if (result.wasAcknowledged())
			return result.getDeletedCount();
		return 0;
	}
	/**
	 * Deletes all matches, rewriting primary-key references to "_id" first.
	 * A null query deletes nothing and returns false (guards against mass deletion).
	 */
	@Override
	public boolean delete(Class<?> _class, DaoQuery _query) {
		if (_query != null) {
			String primaryKey = CollectionUtils.getFirst(DaoSerializer.getFieldsByAnnotation(_class, PrimaryKey.class));
			DaoQuery query = new DaoQuery();
			for (Entry<String, Object> entry : _query.entrySet()) {
				if (NullUtils.isEqual(entry.getKey(), primaryKey))
					query.put("_id", entry.getValue());
				else
					query.put(entry.getKey(), entry.getValue());
			}
			return delete(DaoSerializer.getTableName(_class, getType()), query);
		}
		return false;
	}
	@Override
	public boolean delete(String _tableName, DaoQuery _query) {
		DeleteResult result = db().getCollection(_tableName).deleteMany(prepareQuery(_query));
		return result.wasAcknowledged();
	}
	@Override
	public int count(String _tableName, DaoQuery _query) {
		// NOTE(review): MongoCollection.count(...) is deprecated in driver 3.12 in favor of
		// countDocuments(...); behavior differs for sharded clusters — consider migrating.
		return (int) db().getCollection(_tableName).count(prepareQuery(_query));
	}
	/**
	 * Creates a background index over the given sort fields if absent. The index name is the
	 * "_"-joined field list, replaced by an MD5-based hash when collection + index name would
	 * exceed 60 characters (Mongo namespace length constraints).
	 */
	public void ensureIndex(Class<?> _class, DaoSort _indexOrder) {
		Document index = new Document();
		for (DaoSortField field : _indexOrder.getFields()) {
			index.put(field.getField(), field.isAscending() ? 1 : -1);
		}
		String indexName = CollectionUtils.transformAndDelimit(_indexOrder.getFields(), new ITransformer<DaoSortField, String>() {
			@Override
			public String transform(DaoSortField _daoSortField) {
				return _daoSortField.getField();
			}
		}, "_");
		LOG.debug("Ensuring index: " + indexName);
		String tableName = NullUtils.makeNotNull(DaoSerializer.getTableName(_class, getType()));
		if ((tableName.length() + indexName.length()) > 60) {
			indexName = hash.hash64(indexName);
			LOG.debug("Shortening index name to : " + indexName);
		}
		IndexOptions options = new IndexOptions();
		options.name(indexName);
		options.background(true);
		if (!index.isEmpty())
			db().getCollection(tableName).createIndex(index, options);
	}
	// Resolves the MongoCollection for a class via its table name.
	private MongoCollection<Document> coll(Class<?> _class) {
		return db().getCollection(DaoSerializer.getTableName(_class, getType()));
	}
	private MongoDatabase db() {
		return client.getDatabase(dbName);
	}
	// Builds an inclusion projection (field -> 1) or null when no fields were requested.
	private Document toProjection(Collection<String> _listFields) {
		if (CollectionUtils.isEmpty(_listFields))
			return null;
		Document proj = new Document();
		for (String field : CollectionUtils.makeNotNull(_listFields)) {
			proj.put(field, 1);
		}
		return proj;
	}
	// Converts a DaoSort into a Mongo sort document (1 ascending, -1 descending), or null.
	private Document toSort(DaoSort _sort) {
		if ((_sort == null) || CollectionUtils.isEmpty(_sort.getFields()))
			return null;
		Document sort = new Document();
		for (DaoSortField field : CollectionUtils.makeNotNull(_sort.getFields())) {
			sort.put(field.getField(), field.isAscending() ? 1 : -1);
		}
		return sort;
	}
	// True when the field participates in a text index on the collection; results are cached.
	private boolean isTextIndex(String _collection, String _field) {
		Set<String> fields = textIndexes.get(_collection);
		if (fields == null) {
			fields = new HashSet<>();
			for (Document index : db().getCollection(_collection).listIndexes()) {
				for (Entry<String, Object> field : index.entrySet()) {
					if (field.getValue() instanceof String && field.getValue().equals("text"))
						fields.add(field.getKey());
				}
			}
			textIndexes.put(_collection, fields);
		}
		return fields.contains(_field);
	}
	@Override
	protected DaoQuery prepareQuery(DaoQuery _query) {
		DaoQuery query = super.prepareQuery(_query);
		prepareDates(query);
		return query;
	}
	// Recursively replaces java.util.Date values with epoch milliseconds throughout the query.
	private void prepareDates(DaoQuery _query) {
		for (Entry<String, Object> e : _query.entrySet()) {
			if (e.getValue() instanceof Date)
				e.setValue(((Date) e.getValue()).getTime());
			if (e.getValue() instanceof DaoQuery)
				prepareDates((DaoQuery) e.getValue());
		}
	}
}

View File

@@ -0,0 +1,51 @@
package com.lanternsoftware.util.dao.mongo.dao;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.dao.AbstractDaoSerializer;
import com.lanternsoftware.util.dao.DaoEntity;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.mongo.MongoConfig;
/**
 * DaoSerializer for MongoConfig. The host list is flattened to a comma-separated
 * string on write and split back on read; all other properties map one-to-one.
 */
public class MongoConfigSerializer extends AbstractDaoSerializer<MongoConfig> {
	@Override
	public Class<MongoConfig> getSupportedClass() {
		return MongoConfig.class;
	}

	/** Flattens a MongoConfig into a DaoEntity. */
	@Override
	public DaoEntity toDaoEntity(MongoConfig _o) {
		DaoEntity entity = new DaoEntity();
		entity.put("hosts", CollectionUtils.commaSeparated(_o.getHosts()));
		entity.put("username", _o.getUsername());
		entity.put("password", _o.getPassword());
		entity.put("client_keystore_path", _o.getClientKeystorePath());
		entity.put("client_keystore_password", _o.getClientKeystorePassword());
		entity.put("ca_keystore_path", _o.getCaKeystorePath());
		entity.put("ca_keystore_password", _o.getCaKeystorePassword());
		entity.put("database_name", _o.getDatabaseName());
		entity.put("authentication_database", _o.getAuthenticationDatabase());
		return entity;
	}

	/** Rebuilds a MongoConfig from a DaoEntity produced by {@link #toDaoEntity}. */
	@Override
	public MongoConfig fromDaoEntity(DaoEntity _d) {
		MongoConfig config = new MongoConfig();
		config.setHosts(CollectionUtils.asArrayList(NullUtils.cleanSplit(DaoSerializer.getString(_d, "hosts"), ",")));
		config.setUsername(DaoSerializer.getString(_d, "username"));
		config.setPassword(DaoSerializer.getString(_d, "password"));
		config.setClientKeystorePath(DaoSerializer.getString(_d, "client_keystore_path"));
		config.setClientKeystorePassword(DaoSerializer.getString(_d, "client_keystore_password"));
		config.setCaKeystorePath(DaoSerializer.getString(_d, "ca_keystore_path"));
		config.setCaKeystorePassword(DaoSerializer.getString(_d, "ca_keystore_password"));
		config.setDatabaseName(DaoSerializer.getString(_d, "database_name"));
		config.setAuthenticationDatabase(DaoSerializer.getString(_d, "authentication_database"));
		return config;
	}
}

View File

@@ -0,0 +1 @@
com.lanternsoftware.util.dao.mongo.dao.MongoConfigSerializer

Binary file not shown.

View File

@@ -0,0 +1,60 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.lanternsoftware.util</groupId>
<artifactId>lantern-util-dao</artifactId>
<name>lantern-util-dao</name>
<version>1.0.0</version>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lantern-util-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>bson</artifactId>
<version>4.0.4</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.2</version>
<executions>
<execution>
<goals>
<goal>testCompile</goal>
</goals>
<phase>compile</phase>
</execution>
</executions>
<configuration>
<optimize>true</optimize>
<showDeprecation>true</showDeprecation>
<encoding>UTF-8</encoding>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,323 @@
package com.lanternsoftware.util.dao;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.ITransformer;
import com.lanternsoftware.util.NullUtils;
/**
 * Base implementation of {@code IDaoProxy}.  Implements the wide matrix of query / save /
 * delete convenience overloads in terms of the small set of primitives supplied by concrete
 * proxies (queryForEntities, saveEntity, delete, count, getType — declared on the interface
 * or subclasses), and provides a lazily created thread pool for the *Async variants.
 * Thread-safety: executor creation is synchronized; other state (maxThreads, queryPreparer)
 * is expected to be configured before concurrent use.
 */
public abstract class AbstractDaoProxy implements IDaoProxy {
	// Lazily created by executor(); only used by the *Async methods.
	private ExecutorService executor;
	private int maxThreads = 50;
	// Optional hook that can rewrite every query before execution (e.g. add tenant filters).
	protected QueryPreparer queryPreparer = null;

	/** Stops the async executor, if one was ever created.  Does not wait for running tasks. */
	@Override
	public void shutdown() {
		if (executor != null)
			executor.shutdownNow();
	}

	public void setQueryPreparer(QueryPreparer _queryPreparer) {
		queryPreparer = _queryPreparer;
	}

	// ---- query overloads: all funnel into query(class, query, fields, sort, first, count) ----

	@Override
	public <T> List<T> query(Class<T> _class, DaoQuery _query) {
		// Cast disambiguates between the DaoSort and Collection<String> overloads.
		return query(_class, _query, (DaoSort) null);
	}

	@Override
	public <T> List<T> query(final Class<T> _class, DaoQuery _query, DaoSort _sort) {
		return query(_class, _query, null, _sort);
	}

	@Override
	public <T> List<T> query(Class<T> _class, DaoQuery _query, Collection<String> _fields) {
		return query(_class, _query, _fields, null);
	}

	@Override
	public <T> List<T> query(final Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		// first=0, count=-1: no paging (count -1 is treated as "all" by the entity query).
		return query(_class, _query, _fields, _sort, 0, -1);
	}

	/** Terminal overload: resolves the table name for this proxy type and deserializes the rows. */
	@Override
	public <T> List<T> query(final Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _first, int _count) {
		return toObjects(queryForEntities(DaoSerializer.getTableName(_class, getType()), _query, _fields, _sort, _first, _count), _class);
	}

	// ---- async variants: wrap the synchronous call in a task on the shared executor ----

	@Override
	public <T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query) {
		return submit(new QueryExecution<T>(this, _class, _query));
	}

	@Override
	public <T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query, DaoSort _sort) {
		return submit(new QueryExecution<T>(this, _class, _query, _sort));
	}

	@Override
	public <T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields) {
		return submit(new QueryExecution<T>(this, _class, _query, _fields));
	}

	@Override
	public <T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		return submit(new QueryExecution<T>(this, _class, _query, _fields, _sort));
	}

	/** Async query whose results are post-processed by _finalizer into a different type. */
	@Override
	public <T, V> Future<List<V>> queryWithFinalizer(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, QueryFinalizer<T, V> _finalizer) {
		return submit(new QueryFinalizerExecution<T, V>(this, _class, _query, _fields, _sort, _finalizer));
	}

	// ---- paging: page of results plus total match count (two round trips) ----

	@Override
	public <T> DaoPage<T> queryPage(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count) {
		return new DaoPage<T>(query(_class, _query, _fields, _sort, _offset, _count), count(_class, _query));
	}

	@Override
	public DaoPage<DaoEntity> queryForEntitiesPage(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count) {
		return new DaoPage<DaoEntity>(queryForEntities(_tableName, _query, _fields, _sort, _offset, _count), count(_tableName, _query));
	}

	// ---- queryOne: limit the underlying query to a single row ----

	@Override
	public <T> T queryOne(Class<T> _class, DaoQuery _query) {
		return queryOne(_class, _query, null, null);
	}

	@Override
	public <T> T queryOne(Class<T> _class, DaoQuery _query, DaoSort _sort) {
		return queryOne(_class, _query, null, _sort);
	}

	@Override
	public <T> T queryOne(Class<T> _class, DaoQuery _query, Collection<String> _fields) {
		return queryOne(_class, _query, _fields, null);
	}

	@Override
	public <T> T queryOne(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		// count=1 pushes the limit to the datastore; returns null when nothing matches.
		return CollectionUtils.getFirst(query(_class, _query, _fields, _sort, 0, 1));
	}

	@Override
	public <T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query) {
		return submit(new QueryOneExecution<T>(this, _class, _query));
	}

	@Override
	public <T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query, DaoSort _sort) {
		return submit(new QueryOneExecution<T>(this, _class, _query, _sort));
	}

	@Override
	public <T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields) {
		return submit(new QueryOneExecution<T>(this, _class, _query, _fields));
	}

	@Override
	public <T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		return submit(new QueryOneExecution<T>(this, _class, _query, _fields, _sort));
	}

	// ---- "important" queries: restrict the field list to the serializer's important fields ----

	@Override
	public <T> List<T> queryImportant(Class<T> _class, DaoQuery _query) {
		return queryImportant(_class, _query, null);
	}

	@Override
	public <T> List<T> queryImportant(Class<T> _class, DaoQuery _query, DaoSort _sort) {
		return query(_class, _query, DaoSerializer.getImportantFields(_class), _sort);
	}

	@Override
	public <T> List<T> queryImportant(Class<T> _class, DaoQuery _query, DaoSort _sort, int _first, int _count) {
		return query(_class, _query, DaoSerializer.getImportantFields(_class), _sort, _first, _count);
	}

	@Override
	public <T> Future<List<T>> queryImportantAsync(Class<T> _class, DaoQuery _query) {
		return queryAsync(_class, _query, DaoSerializer.getImportantFields(_class));
	}

	@Override
	public <T> Future<List<T>> queryImportantAsync(Class<T> _class, DaoQuery _query, DaoSort _sort) {
		return queryAsync(_class, _query, DaoSerializer.getImportantFields(_class), _sort);
	}

	@Override
	public <T> DaoPage<T> queryImportantPage(Class<T> _class, DaoQuery _query, DaoSort _sort, int _offset, int _count) {
		return new DaoPage<T>(queryImportant(_class, _query, _sort, _offset, _count), count(_class, _query));
	}

	/** Fetches every row of the class's table (null query = no filter). */
	@Override
	public <T> List<T> queryAll(Class<T> _class) {
		return query(_class, null);
	}

	@Override
	public boolean exists(Class<?> _class, DaoQuery _query) {
		return exists(DaoSerializer.getTableName(_class, getType()), _query);
	}

	@Override
	public boolean exists(String _tableName, DaoQuery _query) {
		// Implemented via count; subclasses may override with a cheaper existence check.
		return count(_tableName, _query) > 0;
	}

	// ---- raw entity queries (no deserialization) ----

	@Override
	public List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query) {
		return queryForEntities(_tableName, _query, (DaoSort) null);
	}

	@Override
	public List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, Collection<String> _fields) {
		return queryForEntities(_tableName, _query, _fields, null);
	}

	@Override
	public List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, DaoSort _sort) {
		return queryForEntities(_tableName, _query, null, _sort);
	}

	@Override
	public List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		return queryForEntities(_tableName, _query, _fields, _sort, 0, -1);
	}

	@Override
	public DaoEntity queryForEntity(String _tableName, DaoQuery _query) {
		return CollectionUtils.getFirst(queryForEntities(_tableName, _query, null, null, 0, 1));
	}

	@Override
	public DaoEntity queryForEntity(String _tableName, DaoQuery _query, DaoSort _sort) {
		return CollectionUtils.getFirst(queryForEntities(_tableName, _query, null, _sort, 0, 1));
	}

	@Override
	public DaoEntity queryForEntity(String _tableName, DaoQuery _query, Collection<String> _fields) {
		return CollectionUtils.getFirst(queryForEntities(_tableName, _query, _fields, null, 0, 1));
	}

	@Override
	public DaoEntity queryForEntity(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		return CollectionUtils.getFirst(queryForEntities(_tableName, _query, _fields, _sort, 0, 1));
	}

	// ---- single-field projections, returned as strings ----

	@Override
	public String queryForOneField(Class<?> _class, DaoQuery _query, String _field) {
		return CollectionUtils.getFirst(queryForField(_class, _query, _field));
	}

	@Override
	public List<String> queryForField(Class<?> _class, DaoQuery _query, String _field) {
		return queryForField(DaoSerializer.getTableName(_class, getType()), _query, _field);
	}

	@Override
	public List<String> queryForField(Class<?> _class, DaoQuery _query, final String _field, DaoSort _sort) {
		return CollectionUtils.transform(queryForEntities(DaoSerializer.getTableName(_class, getType()), _query, Arrays.asList(_field), _sort), new ITransformer<DaoEntity, String>() {
			@Override
			public String transform(DaoEntity _daoEntity) {
				return DaoSerializer.getString(_daoEntity, _field);
			}
		});
	}

	@Override
	public List<String> queryForField(String _tableName, DaoQuery _query, final String _field) {
		return CollectionUtils.transform(queryForEntities(_tableName, _query, Arrays.asList(_field)), new ITransformer<DaoEntity, String>() {
			@Override
			public String transform(DaoEntity _daoEntity) {
				return DaoSerializer.getString(_daoEntity, _field);
			}
		});
	}

	// ---- persistence ----

	/** Serializes and saves one object; returns the id produced by saveEntity. */
	@Override
	public String save(Object _object) {
		return saveEntity(_object.getClass(), DaoSerializer.toDaoEntity(_object, getType()));
	}

	/**
	 * Saves each object in turn.
	 * @return map of generated id to object; objects whose save produced no id are omitted
	 */
	@Override
	public <T> Map<String, T> save(Collection<T> _objects) {
		Map<String, T> ids = new HashMap<String, T>();
		for (T o : _objects) {
			String id = save(o);
			if (NullUtils.isNotEmpty(id))
				ids.put(id, o);
		}
		return ids;
	}

	@Override
	public Map<String, DaoEntity> save(Class<?> _class, Collection<DaoEntity> _entities) {
		Map<String, DaoEntity> ids = new HashMap<>();
		for (DaoEntity e : _entities) {
			ids.put(saveEntity(_class, e), e);
		}
		return ids;
	}

	@Override
	public boolean delete(Class<?> _class, DaoQuery _query) {
		return delete(DaoSerializer.getTableName(_class, getType()), _query);
	}

	@Override
	public int count(Class<?> _class, DaoQuery _query) {
		return count(DaoSerializer.getTableName(_class, getType()), _query);
	}

	// Two submit overloads: one for tasks producing List<T>, one for single-result tasks.
	private <T> Future<List<T>> submit(Callable<List<T>> _execution) {
		return executor().submit(_execution);
	}

	private <T> Future<T> submit(QueryOneExecution<T> _execution) {
		return executor().submit(_execution);
	}

	/** Only affects the pool if called before the first async query (executor is created lazily). */
	public void setMaxThreads(int _maxThreads) {
		maxThreads = _maxThreads;
	}

	@Override
	public void setExecutor(ExecutorService _executor) {
		executor = _executor;
	}

	// Lazy, synchronized creation of the shared async pool.
	private synchronized ExecutorService executor() {
		if (executor == null)
			executor = Executors.newFixedThreadPool(maxThreads);
		return executor;
	}

	protected <T> T toObject(DaoEntity _entity, Class<T> _class) {
		return CollectionUtils.getFirst(toObjects(Collections.singletonList(_entity), _class));
	}

	/** Deserializes each entity using the serializer registered for this proxy's type. */
	protected <T> List<T> toObjects(List<DaoEntity> _entities, final Class<T> _class) {
		return CollectionUtils.transform(_entities, new ITransformer<DaoEntity, T>() {
			@Override
			public T transform(DaoEntity _daoEntity) {
				return DaoSerializer.fromDaoEntity(_daoEntity, _class, getType());
			}
		});
	}

	/** Runs the configured QueryPreparer, if any, over the query before execution. */
	protected DaoQuery prepareQuery(DaoQuery _query) {
		if (queryPreparer == null)
			return _query;
		return queryPreparer.prepareQuery(_query);
	}
}

View File

@@ -0,0 +1,253 @@
package com.lanternsoftware.util.dao;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.dao.annotations.CaseFormat;
import com.lanternsoftware.util.dao.annotations.DBClob;
import com.lanternsoftware.util.dao.annotations.DBIgnore;
import com.lanternsoftware.util.dao.annotations.DBName;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
import com.lanternsoftware.util.dao.annotations.DBType;
import com.lanternsoftware.util.dao.annotations.Important;
import com.lanternsoftware.util.dao.annotations.TimestampDates;
import com.lanternsoftware.util.dao.annotations.Unimportant;
/**
 * Base class for DAO serializers.  On construction it reflects over the supported class
 * (and its superclasses) and records:
 * <ul>
 *   <li>the "important" field list, driven by the {@link Important}/{@link Unimportant} annotations</li>
 *   <li>a database-field-name to JDBC {@link Types} map</li>
 *   <li>a multimap of field annotation type to the field names carrying it</li>
 * </ul>
 * It also provides the static naming helpers (pascal / camel / snake conversion) used to map
 * java field names to database column names.
 */
public abstract class AbstractDaoSerializer<T> implements IDaoSerializer<T> {
	protected final Map<Class<? extends Annotation>, List<String>> annotations = new HashMap<Class<? extends Annotation>, List<String>>();
	protected final List<String> importantFields = new ArrayList<String>();
	protected final Map<String, Integer> fieldTypes = new HashMap<String, Integer>();

	public AbstractDaoSerializer() {
		addFields(getSupportedClass());
	}

	/**
	 * Registers all serializable fields of {@code _class}, then recurses into its superclass.
	 * If any field is marked {@link Important} only those fields are considered important;
	 * otherwise every field not marked {@link Unimportant} is.
	 */
	public void addFields(Class<?> _class) {
		if (_class == null)
			return;
		List<String> important = new ArrayList<String>();
		List<String> unimportant = new ArrayList<String>();
		List<String> normal = new ArrayList<String>();
		for (Field f : _class.getDeclaredFields()) {
			if (!isSerializable(f))
				continue;
			String dbName = fieldToDatabaseName(f);
			if (f.isAnnotationPresent(Important.class))
				important.add(dbName);
			else if (f.isAnnotationPresent(Unimportant.class))
				unimportant.add(dbName);
			else
				normal.add(dbName);
			// Map the java field type to a JDBC SQL type.
			// Fix: was "Byte.TYPE, byte.class" (the same class listed twice, since
			// Byte.TYPE == byte.class), so boxed Byte fields were never mapped; now
			// matches the boxed/primitive pairing used by the other branches.
			// NOTE(review): this mapping intentionally uses f.getType(), not the
			// @DBType override returned by getType(f) — confirm that is the intent.
			if (NullUtils.isOneOf(f.getType(), Byte.TYPE, Byte.class))
				fieldTypes.put(dbName, Types.INTEGER);
			else if (NullUtils.isOneOf(f.getType(), Short.TYPE, Short.class))
				fieldTypes.put(dbName, Types.INTEGER);
			else if (NullUtils.isOneOf(f.getType(), Integer.TYPE, Integer.class))
				fieldTypes.put(dbName, Types.INTEGER);
			else if (NullUtils.isOneOf(f.getType(), Long.TYPE, Long.class))
				fieldTypes.put(dbName, Types.BIGINT);
			else if (NullUtils.isOneOf(f.getType(), Double.TYPE, Double.class, Float.TYPE, Float.class))
				fieldTypes.put(dbName, Types.DOUBLE);
			else if (NullUtils.isOneOf(f.getType(), Boolean.TYPE, Boolean.class))
				fieldTypes.put(dbName, Types.BIT);
			else if (f.getType().equals(String.class) || f.getType().isEnum()) {
				if (f.isAnnotationPresent(DBClob.class))
					fieldTypes.put(dbName, Types.CLOB);
				else
					fieldTypes.put(dbName, Types.VARCHAR);
			}
			else if (f.getType().equals(Date.class)) {
				// Dates are stored as epoch millis unless the class opts into SQL timestamps.
				if (DaoSerializer.isAnnotationPresent(_class, TimestampDates.class))
					fieldTypes.put(dbName, Types.TIMESTAMP);
				else
					fieldTypes.put(dbName, Types.BIGINT);
			}
			for (Annotation a : f.getAnnotations()) {
				CollectionUtils.addToMultiMap(a.annotationType(), dbName, annotations);
			}
		}
		if (!important.isEmpty())
			importantFields.addAll(important);
		else {
			normal.removeAll(unimportant);
			importantFields.addAll(normal);
		}
		addFields(_class.getSuperclass());
	}

	@Override
	public List<String> getFieldsByAnnotation(Class<? extends Annotation> _fieldAnnotation) {
		return CollectionUtils.makeNotNull(annotations.get(_fieldAnnotation));
	}

	@Override
	public List<String> getImportantFields() {
		return importantFields;
	}

	/** @return the JDBC {@link Types} constant for the field, or {@link Types#NULL} if unknown */
	@Override
	public int getSqlType(String _fieldName) {
		return fieldTypes.getOrDefault(_fieldName, Types.NULL);
	}

	/** Table name from the {@link DBSerializable} annotation, else snake_case of the class name. */
	@Override
	public String getTableName() {
		DBSerializable table = getSupportedClass().getAnnotation(DBSerializable.class);
		if ((table != null) && NullUtils.isNotEmpty(table.name()))
			return table.name();
		return getterNameToDatabaseName(getSupportedClass().getSimpleName());
	}

	/** "myField" -&gt; "MyField" (the suffix a getter would use). */
	public static String fieldToGetterName(Field _field) {
		String name = _field.getName();
		return Character.toUpperCase(name.charAt(0)) + name.substring(1);
	}

	public static String getterNameToDatabaseName(String _name) {
		return toSnake(_name);
	}

	public static String getterNameToDatabaseName(String _name, CaseFormat _format) {
		return convertCase(_name, CaseFormat.PASCAL, _format);
	}

	/**
	 * Converts {@code _name} between case conventions by normalizing through pascal case.
	 * Null and empty input are returned unchanged (previously threw on empty camel input).
	 */
	public static String convertCase(String _name, CaseFormat _inFormat, CaseFormat _outFormat) {
		if ((_name == null) || _name.isEmpty() || (_inFormat == _outFormat))
			return _name;
		String pascal;
		if (_inFormat == CaseFormat.SNAKE)
			pascal = toPascal(_name);
		else if (_inFormat == CaseFormat.CAMEL)
			pascal = Character.toUpperCase(_name.charAt(0)) + _name.substring(1);
		else
			pascal = _name;
		if (_outFormat == CaseFormat.SNAKE)
			return toSnake(pascal);
		if (_outFormat == CaseFormat.CAMEL)
			return Character.toLowerCase(pascal.charAt(0)) + pascal.substring(1);
		return pascal;
	}

	/** "my_field_name" -&gt; "MyFieldName".  Underscores are treated as word separators. */
	private static String toPascal(String _snake) {
		StringBuilder field = new StringBuilder();
		boolean charWasWordStart = true;
		for (int i = 0; i < _snake.length(); i++) {
			if (_snake.charAt(i) == '_')
				charWasWordStart = true;
			else {
				field.append(charWasWordStart?Character.toUpperCase(_snake.charAt(i)):_snake.charAt(i));
				charWasWordStart = false;
			}
		}
		return field.toString();
	}

	/**
	 * "MyFieldName" -&gt; "my_field_name".  Runs of capitals (acronyms) stay a single word.
	 * Fix: empty input now returns "" instead of throwing a NullPointerException
	 * (the builder was only created lazily on the first character).
	 */
	private static String toSnake(String _pascal) {
		StringBuilder field = new StringBuilder();
		boolean charWasUpper = false;
		for (int i = 0; i < _pascal.length(); i++) {
			char c = _pascal.charAt(i);
			if (Character.isUpperCase(c)) {
				// Separator only at a lower->upper boundary, never before the first character.
				if ((field.length() > 0) && !charWasUpper)
					field.append("_");
				field.append(Character.toLowerCase(c));
				charWasUpper = true;
			}
			else {
				charWasUpper = false;
				field.append(c);
			}
		}
		return field.toString();
	}

	public static String fieldToDatabaseName(Field _field) {
		return fieldToDatabaseName(_field, CaseFormat.SNAKE);
	}

	/** Column name for a field: an explicit {@link DBName} wins, else a case conversion. */
	public static String fieldToDatabaseName(Field _field, CaseFormat _format) {
		DBName name = _field.getAnnotation(DBName.class);
		if (name != null)
			return name.name();
		if (_format == CaseFormat.CAMEL)
			return _field.getName();
		return getterNameToDatabaseName(fieldToGetterName(_field), _format);
	}

	/** The field's effective type: the {@link DBType} override if present, else the declared type. */
	public static Class<?> getType(Field _f) {
		DBType type = _f.getAnnotation(DBType.class);
		if (type != null)
			return type.type();
		return _f.getType();
	}

	public static boolean isSerializable(Field _f) {
		return isSerializable(_f, false);
	}

	/** Static, transient and {@link DBIgnore} fields are never serialized. */
	public static boolean isSerializable(Field _f, boolean _serializeObjects) {
		if (Modifier.isStatic(_f.getModifiers()) || Modifier.isTransient(_f.getModifiers()) || _f.isAnnotationPresent(DBIgnore.class))
			return false;
		if (_serializeObjects)
			return true;
		return !requiresCustomSerializer(_f);
	}

	/** True when the field's (element) type is not a primitive/enum/common value type. */
	public static boolean requiresCustomSerializer(Field _f) {
		Class<?> type = getType(_f);
		if (Collection.class.isAssignableFrom(type))
			type = getCollectionType(_f);
		return !(type.isPrimitive() || type.isEnum() || NullUtils.isOneOf(type, String.class, Date.class, BigDecimal.class, byte[].class, Boolean.class, Double.class, Long.class, Integer.class, Float.class));
	}

	/** @return the element type of a generic collection field, or null if it cannot be resolved */
	public static Class<?> getCollectionType(Field _f) {
		if (Collection.class.isAssignableFrom(getType(_f)) && (_f.getGenericType() instanceof ParameterizedType)) {
			ParameterizedType t = (ParameterizedType) _f.getGenericType();
			if (t.getActualTypeArguments().length > 0) {
				Type t2 = t.getActualTypeArguments()[0];
				if (t2 instanceof Class)
					return (Class<?>)t2;
			}
		}
		return null;
	}

	@Override
	public List<DaoProxyType> getSupportedProxies() {
		return Collections.emptyList();
	}

	@Override
	public List<DaoSort> getIndexes() {
		return null;
	}
}

View File

@@ -0,0 +1,79 @@
package com.lanternsoftware.util.dao;
import java.io.File;
import java.io.FileInputStream;
import java.lang.annotation.Annotation;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import org.apache.commons.io.IOUtils;
import com.lanternsoftware.util.NullUtils;
/**
 * Scans a java source tree for classes mentioning a given annotation or superclass name,
 * then confirms candidates by loading them with reflection.  Only top-level
 * "public class" declarations are recognized; interfaces, enums and annotations are skipped.
 */
public abstract class AnnotationFinder {
	/**
	 * Finds classes under {@code _codePath} that carry {@code _annotationClass}.
	 * @return map of fully qualified class name to the directory containing its source file
	 */
	public static Map<String, String> findAnnotatedClasses(String _codePath, Class<? extends Annotation> _annotationClass) {
		String annotationName = "@" + _annotationClass.getSimpleName();
		Map<String, String> mapClasses = new TreeMap<>();
		searchFile(new File(_codePath), mapClasses, annotationName);
		Iterator<String> classIter = mapClasses.keySet().iterator();
		while (classIter.hasNext()) {
			String className = classIter.next();
			try {
				Class<?> clazz = Class.forName(className);
				if (!clazz.isAnnotationPresent(_annotationClass))
					classIter.remove();
			}
			catch (ClassNotFoundException | LinkageError _e) {
				// Class not on the classpath (or failed to link).  NOTE(review): unlike
				// findSubclasses, unloadable classes are kept in the result here —
				// preserved as-is, confirm that is intentional.
			}
		}
		return mapClasses;
	}

	/**
	 * Finds classes under {@code _codePath} whose source mentions {@code _superclass}'s simple
	 * name and that are loadable as subclasses of it.
	 */
	public static Map<String, String> findSubclasses(String _codePath, Class<?> _superclass) {
		Map<String, String> mapClasses = new TreeMap<>();
		searchFile(new File(_codePath), mapClasses, _superclass.getSimpleName());
		Iterator<String> classIter = mapClasses.keySet().iterator();
		while (classIter.hasNext()) {
			String className = classIter.next();
			Class<?> clazz = NullUtils.getClass(className, _superclass);
			if (clazz == null)
				classIter.remove();
		}
		return mapClasses;
	}

	/**
	 * Recursively scans {@code _f}; for each .java file containing {@code _searchString},
	 * extracts the package and public class name and records them in {@code _mapClasses}.
	 */
	private static void searchFile(File _f, Map<String, String> _mapClasses, String _searchString) {
		if (_f == null)
			return;
		if (_f.isDirectory()) {
			File[] children = _f.listFiles();
			if (children == null)
				return; // I/O error or permission problem - nothing to scan (listFiles() may return null)
			for (File child : children) {
				searchFile(child, _mapClasses, _searchString);
			}
		}
		else if (_f.getName().endsWith(".java")) {
			// try-with-resources: the stream was previously leaked on every file read
			try (FileInputStream is = new FileInputStream(_f)) {
				String source = IOUtils.toString(is);
				if (!source.contains(_searchString))
					return;
				int packagePos = source.indexOf("package ");
				if (packagePos < 0)
					return; // default-package source - cannot build a fully qualified name
				int packageEnd = source.indexOf(";", packagePos);
				if (packageEnd < 0)
					return; // malformed source
				String packageName = source.substring(packagePos + 8, packageEnd);
				int classPos = source.indexOf("public class");
				if (classPos < 0)
					return; // previously indexOf(...) + 12 produced position 11 on a miss
				classPos += 12;
				while (source.charAt(classPos) == ' ')
					classPos++;
				// Class name ends at the first whitespace after the declaration keyword.
				int newLineN = source.indexOf("\n", classPos);
				int newLineR = source.indexOf("\r", classPos);
				int space = source.indexOf(" ", classPos);
				int classEnd = NullUtils.min((newLineN == -1) ? Integer.MAX_VALUE : newLineN, (newLineR == -1) ? Integer.MAX_VALUE : newLineR, (space == -1) ? Integer.MAX_VALUE : space);
				String className = source.substring(classPos, classEnd);
				_mapClasses.put(packageName + "." + className, _f.getParent());
			}
			catch (Exception e) {
				// Best-effort scan: a single unreadable file should not abort the walk.
				e.printStackTrace();
			}
		}
	}
}

View File

@@ -0,0 +1,100 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import org.bson.Document;
/**
 * A thin {@link Map} facade over a BSON {@link Document}.  Every map operation delegates
 * directly to the wrapped document, so a DaoEntity can be handed to any code expecting a
 * {@code Map<String, Object>} while remaining convertible back to a Document for the
 * mongo driver via {@link #toDocument()}.
 */
public class DaoEntity implements Map<String, Object> {
	private final Document doc;

	/** Creates an empty entity. */
	public DaoEntity() {
		doc = new Document();
	}

	/** Wraps the given document directly (no copy); a null document becomes an empty one. */
	public DaoEntity(Document _doc) {
		if (_doc == null)
			doc = new Document();
		else
			doc = _doc;
	}

	/** Copies every entry of the given map into a new document. */
	public DaoEntity(Map<String, ?> _map) {
		doc = new Document();
		doc.putAll(_map);
	}

	/** Creates an entity holding a single entry. */
	public DaoEntity(String _name, Object _o) {
		doc = new Document();
		put(_name, _o);
	}

	/** Adds an entry and returns this entity, for call chaining. */
	public DaoEntity and(String _name, Object _o) {
		put(_name, _o);
		return this;
	}

	@Override
	public int size() {
		return doc.size();
	}

	@Override
	public boolean isEmpty() {
		return doc.isEmpty();
	}

	@Override
	public boolean containsKey(Object _key) {
		return doc.containsKey(_key);
	}

	@Override
	public boolean containsValue(Object _value) {
		return doc.containsValue(_value);
	}

	@Override
	public Object get(Object _key) {
		return doc.get(_key);
	}

	@Override
	public Object put(String _key, Object _value) {
		return doc.put(_key, _value);
	}

	@Override
	public Object remove(Object _key) {
		return doc.remove(_key);
	}

	@Override
	public void putAll(Map<? extends String, ?> _map) {
		doc.putAll(_map);
	}

	@Override
	public void clear() {
		doc.clear();
	}

	@Override
	public Set<String> keySet() {
		return doc.keySet();
	}

	@Override
	public Collection<Object> values() {
		return doc.values();
	}

	@Override
	public Set<Entry<String, Object>> entrySet() {
		return doc.entrySet();
	}

	/** @return the live backing document (not a copy) */
	public Document toDocument() {
		return doc;
	}
}

View File

@@ -0,0 +1,32 @@
package com.lanternsoftware.util.dao;
import java.util.List;
/**
 * One page of query results together with the total number of rows matching the query,
 * so callers can render paging controls without issuing a separate count.
 * @param <T> the result element type
 */
public class DaoPage<T> {
	private List<T> results;
	private int totalResultCount;

	/** No-arg constructor for serialization frameworks. */
	public DaoPage() {
	}

	/**
	 * @param _results the results for this page only
	 * @param _totalResultCount the total number of rows matching the query across all pages
	 */
	public DaoPage(List<T> _results, int _totalResultCount) {
		this.results = _results;
		this.totalResultCount = _totalResultCount;
	}

	/** @return this page's results; null if never set */
	public List<T> getResults() {
		return this.results;
	}

	public void setResults(List<T> _results) {
		this.results = _results;
	}

	/** @return the total match count across all pages, not the size of this page */
	public int getTotalResultCount() {
		return this.totalResultCount;
	}

	public void setTotalResultCount(int _totalResultCount) {
		this.totalResultCount = _totalResultCount;
	}
}

View File

@@ -0,0 +1,10 @@
package com.lanternsoftware.util.dao;
/**
 * Identifies the backing datastore technology a DAO proxy or serializer supports.
 * Used to select the appropriate serializer/table-name mapping per store.
 */
public enum DaoProxyType {
	JDBC,      // relational databases accessed via JDBC
	SOLR,      // Apache Solr
	KUDU,      // Apache Kudu
	MONGO,     // MongoDB
	EPHEMERAL, // presumably an in-memory, non-persistent store - name-based, confirm
	DYNAMODB   // AWS DynamoDB
}

View File

@@ -0,0 +1,211 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import java.util.Map;
import org.bson.Document;
/**
 * A mongo-style query document.  Keys are field names (or top-level operators such as "$or");
 * values are either literal values to match or nested operator documents ("$gt", "$in", "$ne",
 * ...).  The "and*" instance methods mutate this query and return it for chaining; the static
 * factories create a new single-condition query.  Several operators here are not standard
 * mongo syntax ("$equalIgnoreCase", "$startsWith", "$contains", "$null", "$notnull") —
 * presumably each proxy implementation translates them for its datastore; confirm per proxy.
 */
public class DaoQuery extends Document {
	public DaoQuery() {
	}

	public DaoQuery(Map<String, Object> map) {
		super(map);
	}

	/** Creates a query with a single field condition. */
	public DaoQuery(String _name, Object _o) {
		put(_name, _o);
	}

	// ---- chaining mutators: each adds a condition and returns this query ----

	/** Adds an equality condition. */
	public DaoQuery and(String _name, Object _o) {
		put(_name, _o);
		return this;
	}

	/** Adds the given query under "$or".  Note: replaces any previous "$or" value. */
	public DaoQuery or(DaoQuery _query) {
		put("$or", _query);
		return this;
	}

	public DaoQuery andIgnoreCase(String _name, Object _o) {
		put(_name, new DaoQuery("$equalIgnoreCase", _o));
		return this;
	}

	public DaoQuery andNotEquals(String _name, Object _o) {
		put(_name, new DaoQuery("$ne", _o));
		return this;
	}

	// The typed In/NotIn variants exist so callers can pass collections of the matching
	// element type; erasure makes the bodies identical.
	public DaoQuery andIn(String _name, Collection<String> _values) {
		put(_name, new DaoQuery("$in", _values));
		return this;
	}

	public DaoQuery andNotIn(String _name, Collection<String> _values) {
		put(_name, new DaoQuery("$nin", _values));
		return this;
	}

	public DaoQuery andInLongs(String _name, Collection<Long> _values) {
		put(_name, new DaoQuery("$in", _values));
		return this;
	}

	public DaoQuery andNotInLongs(String _name, Collection<Long> _values) {
		put(_name, new DaoQuery("$nin", _values));
		return this;
	}

	public DaoQuery andInIntegers(String _name, Collection<Integer> _values) {
		put(_name, new DaoQuery("$in", _values));
		return this;
	}

	public DaoQuery andNotInIntegers(String _name, Collection<Integer> _values) {
		put(_name, new DaoQuery("$nin", _values));
		return this;
	}

	public DaoQuery andGt(String _name, Object _o) {
		put(_name, new DaoQuery("$gt", _o));
		return this;
	}

	public DaoQuery andLt(String _name, Object _o) {
		put(_name, new DaoQuery("$lt", _o));
		return this;
	}

	public DaoQuery andGte(String _name, Object _o) {
		put(_name, new DaoQuery("$gte", _o));
		return this;
	}

	public DaoQuery andLte(String _name, Object _o) {
		put(_name, new DaoQuery("$lte", _o));
		return this;
	}

	/** Exclusive on both bounds. */
	public DaoQuery andBetween(String _name, Object _lowerBound, Object _upperBound) {
		put(_name, new DaoQuery("$gt", _lowerBound).and("$lt", _upperBound));
		return this;
	}

	/** Inclusive on both bounds. */
	public DaoQuery andBetweenInclusive(String _name, Object _lowerBound, Object _upperBound) {
		put(_name, new DaoQuery("$gte", _lowerBound).and("$lte", _upperBound));
		return this;
	}

	/** Inclusive lower bound, exclusive upper bound. */
	public DaoQuery andBetweenInclusiveExclusive(String _name, Object _lowerBound, Object _upperBound) {
		put(_name, new DaoQuery("$gte", _lowerBound).and("$lt", _upperBound));
		return this;
	}

	/** Exclusive lower bound, inclusive upper bound. */
	public DaoQuery andBetweenExclusiveInclusive(String _name, Object _lowerBound, Object _upperBound) {
		put(_name, new DaoQuery("$gt", _lowerBound).and("$lte", _upperBound));
		return this;
	}

	public DaoQuery andStartsWith(String _name, String _o) {
		put(_name, new DaoQuery("$startsWith", _o));
		return this;
	}

	public DaoQuery andStartsWithIgnoreCase(String _name, String _o) {
		put(_name, new DaoQuery("$startsWithIgnoreCase", _o));
		return this;
	}

	public DaoQuery andContains(String _name, String _o) {
		put(_name, new DaoQuery("$contains", _o));
		return this;
	}

	public DaoQuery andContainsIgnoreCase(String _name, String _o) {
		put(_name, new DaoQuery("$containsIgnoreCase", _o));
		return this;
	}

	/** Matches rows where the field is null/absent (sentinel string, translated per proxy). */
	public DaoQuery andNull(String _name) {
		put(_name, "$null");
		return this;
	}

	/** Matches rows where the field is present and not null. */
	public DaoQuery andNotNull(String _name) {
		put(_name, "$notnull");
		return this;
	}

	// ---- static factories: each creates a new query with a single condition ----

	public static DaoQuery notEquals(String _name, Object _value) {
		return new DaoQuery().andNotEquals(_name, _value);
	}

	public static DaoQuery in(String _name, Collection<String> _values) {
		return new DaoQuery().andIn(_name, _values);
	}

	public static DaoQuery notIn(String _name, Collection<String> _values) {
		return new DaoQuery().andNotIn(_name, _values);
	}

	public static DaoQuery inLongs(String _name, Collection<Long> _values) {
		return new DaoQuery().andInLongs(_name, _values);
	}

	public static DaoQuery notInLongs(String _name, Collection<Long> _values) {
		return new DaoQuery().andNotInLongs(_name, _values);
	}

	public static DaoQuery inIntegers(String _name, Collection<Integer> _values) {
		return new DaoQuery().andInIntegers(_name, _values);
	}

	public static DaoQuery notInIntegers(String _name, Collection<Integer> _values) {
		return new DaoQuery().andNotInIntegers(_name, _values);
	}

	public static DaoQuery gt(String _name, Object _value) {
		return new DaoQuery().andGt(_name, _value);
	}

	public static DaoQuery lt(String _name, Object _value) {
		return new DaoQuery().andLt(_name, _value);
	}

	public static DaoQuery gte(String _name, Object _value) {
		return new DaoQuery().andGte(_name, _value);
	}

	public static DaoQuery lte(String _name, Object _value) {
		return new DaoQuery().andLte(_name, _value);
	}

	public static DaoQuery between(String _name, Object _lowerBound, Object _upperBound) {
		return new DaoQuery().andBetween(_name, _lowerBound, _upperBound);
	}

	public static DaoQuery startsWith(String _name, String _value) {
		return new DaoQuery().andStartsWith(_name, _value);
	}

	public static DaoQuery startsWithIgnoreCase(String _name, String _value) {
		return new DaoQuery().andStartsWithIgnoreCase(_name, _value);
	}

	public static DaoQuery contains(String _name, String _value) {
		return new DaoQuery().andContains(_name, _value);
	}

	public static DaoQuery containsIgnoreCase(String _name, String _value) {
		return new DaoQuery().andContainsIgnoreCase(_name, _value);
	}

	public static DaoQuery isNull(String _name) {
		return new DaoQuery().andNull(_name);
	}

	public static DaoQuery notNull(String _name) {
		return new DaoQuery().andNotNull(_name);
	}
}

View File

@@ -0,0 +1,913 @@
package com.lanternsoftware.util.dao;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import org.bson.BsonBinaryReader;
import org.bson.BsonBinaryWriter;
import org.bson.Document;
import org.bson.codecs.BsonTypeClassMap;
import org.bson.codecs.BsonValueCodecProvider;
import org.bson.codecs.DecoderContext;
import org.bson.codecs.DocumentCodec;
import org.bson.codecs.DocumentCodecProvider;
import org.bson.codecs.EncoderContext;
import org.bson.codecs.IterableCodec;
import org.bson.codecs.ValueCodecProvider;
import org.bson.codecs.configuration.CodecRegistries;
import org.bson.io.BasicOutputBuffer;
import org.bson.json.Converter;
import org.bson.json.JsonReader;
import org.bson.json.JsonWriterSettings;
import org.bson.json.StrictJsonWriter;
import org.bson.types.Binary;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.DateUtils;
import com.lanternsoftware.util.ITransformer;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.ZipUtils;
import com.lanternsoftware.util.dao.annotations.DBIndex;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
import com.lanternsoftware.util.dao.annotations.PrimaryKey;
public class DaoSerializer {
private static final Logger LOG = LoggerFactory.getLogger(DaoSerializer.class);
private static final Map<Class<?>, List<IDaoSerializer>> serializers = new HashMap<>();
static {
for (IDaoSerializer serializer : ServiceLoader.load(IDaoSerializer.class)) {
CollectionUtils.addToMultiMap(serializer.getSupportedClass(), serializer, serializers);
}
}
public static void addSerializer(IDaoSerializer<?> _serializer) {
CollectionUtils.addToMultiMap(_serializer.getSupportedClass(), _serializer, serializers);
}
public static <T> IDaoSerializer<T> getSerializer(Class<T> _class) {
return getSerializer(_class, null);
}
public static <T> IDaoSerializer<T> getSerializer(Class<T> _class, DaoProxyType _proxyType) {
List<IDaoSerializer> classSerializers = serializers.get(_class);
if (classSerializers == null) {
LOG.error("No serializer exists for class " + _class.getCanonicalName());
return null;
}
if (_proxyType != null) {
for (IDaoSerializer serializer : classSerializers) {
if (serializer.getSupportedProxies().contains(_proxyType))
return serializer;
}
}
return CollectionUtils.getFirst(classSerializers);
}
/** Serializes _o to a DaoEntity using the default (non-proxy-specific) serializer. */
public static DaoEntity toDaoEntity(Object _o) {
    return toDaoEntity(_o, null);
}

/**
 * Serializes _o to a DaoEntity, preferring a serializer registered for _proxyType.
 * DaoEntity inputs pass through and Document inputs are wrapped without serialization.
 * Returns null for null input, a missing serializer, or a serialization failure (logged).
 */
public static DaoEntity toDaoEntity(Object _o, DaoProxyType _proxyType) {
    if (_o == null) {
        return null;
    }
    if (_o instanceof DaoEntity)
        return (DaoEntity) _o;
    if (_o instanceof Document)
        return new DaoEntity((Document) _o);
    IDaoSerializer serializer = getSerializer(_o.getClass(), _proxyType);
    if (serializer == null)
        return null;
    try {
        return serializer.toDaoEntity(_o);
    }
    catch (Exception _e) {
        LOG.error("Failed to serialize entity", _e);
        return null;
    }
}

/** Deserializes _entity into an instance of _class using the default serializer. */
public static <T> T fromDaoEntity(DaoEntity _entity, Class<T> _class) {
    return fromDaoEntity(_entity, _class, null);
}

/**
 * Deserializes _entity into an instance of _class, preferring a serializer for _proxyType.
 * DaoEntity and DaoQuery targets are handled directly without a registered serializer.
 * Returns null for null input or when no serializer is registered for _class.
 */
public static <T> T fromDaoEntity(DaoEntity _entity, Class<T> _class, DaoProxyType _proxyType) {
    if (_entity == null)
        return null;
    if (_class == DaoEntity.class)
        return _class.cast(_entity);
    if (_class == DaoQuery.class)
        return _class.cast(new DaoQuery(_entity));
    IDaoSerializer<T> serializer = getSerializer(_class, _proxyType);
    if (serializer == null)
        return null;
    return serializer.fromDaoEntity(_entity);
}
/** Returns the table/collection name for _class from its default serializer, or null. */
public static String getTableName(Class<?> _class) {
    return getTableName(_class, null);
}

/** Returns the table/collection name via the serializer preferred for _proxyType, or null. */
public static String getTableName(Class<?> _class, DaoProxyType _proxyType) {
    IDaoSerializer<?> serializer = getSerializer(_class, _proxyType);
    if (serializer == null)
        return null;
    return serializer.getTableName();
}

/** Returns the field names of _entityClass that carry _fieldAnnotation (e.g. PrimaryKey). */
public static List<String> getFieldsByAnnotation(Class<?> _entityClass, Class<? extends Annotation> _fieldAnnotation) {
    return getFieldsByAnnotation(_entityClass, _fieldAnnotation, null);
}

/** Annotated field names via the serializer preferred for _proxyType; empty list when unresolvable. */
public static List<String> getFieldsByAnnotation(Class<?> _entityClass, Class<? extends Annotation> _fieldAnnotation, DaoProxyType _proxyType) {
    if (_entityClass == null) {
        return Collections.emptyList();
    }
    IDaoSerializer<?> serializer = getSerializer(_entityClass, _proxyType);
    if (serializer == null)
        return Collections.emptyList();
    return serializer.getFieldsByAnnotation(_fieldAnnotation);
}

/** Returns the field names the registered serializer marks as important; empty when unresolvable. */
public static List<String> getImportantFields(Class<?> _entityClass) {
    if (_entityClass == null) {
        return Collections.emptyList();
    }
    IDaoSerializer<?> serializer = getSerializer(_entityClass, null);
    if (serializer == null)
        return Collections.emptyList();
    return serializer.getImportantFields();
}

/** Returns the java.sql.Types constant for _fieldName via the JDBC serializer, or Types.NULL. */
public static int getSqlType(Class<?> _entityClass, String _fieldName) {
    if (_entityClass == null) {
        return Types.NULL;
    }
    IDaoSerializer<?> serializer = getSerializer(_entityClass, DaoProxyType.JDBC);
    if (serializer == null) {
        return Types.NULL;
    }
    return serializer.getSqlType(_fieldName);
}
/**
 * Compares the value stored at _field against _comp, first coercing the stored value to
 * _comp's runtime type. Enums compare by name; dispatch is on _comp's exact wrapper type
 * (e.g. a Float comparand uses getFloat, not getDouble). Returns 0 for null or
 * unsupported comparison types.
 */
public static int compare(DaoEntity _e, String _field, Object _comp) {
    if (_comp instanceof String)
        return NullUtils.compare(getString(_e, _field), (String) _comp);
    if (_comp instanceof Date)
        return NullUtils.compare(getDate(_e, _field), (Date) _comp);
    if (_comp instanceof Long)
        return NullUtils.compare(getLong(_e, _field), (Long) _comp);
    if (_comp instanceof Short)
        return NullUtils.compare(getShort(_e, _field), (Short) _comp);
    if (_comp instanceof BigDecimal)
        return NullUtils.compare(getBigDecimal(_e, _field), (BigDecimal) _comp);
    if (_comp instanceof Double)
        return NullUtils.compare(getDouble(_e, _field), (Double) _comp);
    if (_comp instanceof Float)
        return NullUtils.compare(getFloat(_e, _field), (Float) _comp);
    if (_comp instanceof Integer)
        return NullUtils.compare(getInteger(_e, _field), (Integer) _comp);
    if (_comp instanceof Boolean)
        return NullUtils.compare(getBoolean(_e, _field), (Boolean) _comp);
    if (_comp instanceof Enum)
        return NullUtils.compare(getString(_e, _field), ((Enum) _comp).name());
    return 0;
}
/** Reads _field as a String; non-string values are stringified via String.valueOf. */
public static String getString(DaoEntity _e, String _field) {
    if (_e == null) {
        return null;
    }
    return toString(_e.get(_field));
}

/** Null-safe stringification: strings pass through, other values use String.valueOf. */
public static String toString(Object _o) {
    if (_o instanceof String)
        return (String) _o;
    if (_o != null)
        return String.valueOf(_o);
    return null;
}

/**
 * Returns the entity's primary key as a string, read from the field annotated with
 * PrimaryKey on _entityClass, falling back to the conventional "_id" field when no
 * annotated field is declared.
 */
public static String getId(DaoEntity _e, Class<?> _entityClass) {
    if (_e == null) {
        return null;
    }
    String sPrimaryKeyField = CollectionUtils.getFirst(DaoSerializer.getFieldsByAnnotation(_entityClass, PrimaryKey.class));
    if (NullUtils.isEmpty(sPrimaryKeyField)) {
        sPrimaryKeyField = "_id";
    }
    return getString(_e, sPrimaryKeyField);
}
/**
 * Reads _field as a Date. Accepts Timestamp, Date, and numeric epoch-millis values;
 * a numeric value equal to _lNullValue is treated as the "no date" sentinel and
 * returns null.
 */
public static Date getDate(DaoEntity _e, String _field, long _lNullValue) {
    if ((_e == null) || (!_e.containsKey(_field))) {
        return null;
    }
    Object o = _e.get(_field);
    if (o == null) {
        return null;
    }
    if (o instanceof Timestamp) {
        return new Date(((Timestamp) o).getTime());
    }
    if (o instanceof Date) {
        return (Date) o;
    }
    long lDate = toLong(o);
    if (lDate == _lNullValue) {
        return null;
    }
    return new Date(lDate);
}

/** Reads _sField as a Date, treating Long.MIN_VALUE as the null sentinel. */
public static Date getDate(DaoEntity _e, String _sField) {
    return getDate(_e, _sField, Long.MIN_VALUE);
}

/** Reads _sField as a String and parses it with the given date _format pattern. */
public static Date getDate(DaoEntity _e, String _sField, String _format) {
    return DateUtils.parse(_format, getString(_e, _sField));
}

/** Converts a Date to a SQL Timestamp; null-safe. */
public static Timestamp toTimestamp(Date _dt) {
    if (_dt == null) {
        return null;
    }
    return new Timestamp(_dt.getTime());
}

/** Converts a Date to epoch millis, using Long.MIN_VALUE as the null sentinel. */
public static long toLong(Date _dt) {
    return toLong(_dt, Long.MIN_VALUE);
}

/** Converts a Date to epoch millis, returning _lNullValue when _dt is null. */
public static long toLong(Date _dt, long _lNullValue) {
    if (_dt == null) {
        return _lNullValue;
    }
    return _dt.getTime();
}
/** Reads _sField as a short; returns 0 when the entity is null. */
public static short getShort(DaoEntity _e, String _sField) {
    if (_e == null) {
        return 0;
    }
    return toShort(_e.get(_sField));
}

/**
 * Coerces an arbitrary value to a short: numerics are narrowed, booleans map to 1/0,
 * strings are parsed. Returns 0 for null, unsupported types, or parse failures.
 */
public static short toShort(Object _o) {
    try {
        if (_o instanceof Short) {
            return (Short) _o;
        }
        if (_o instanceof Integer) {
            return ((Integer) _o).shortValue();
        }
        if (_o instanceof Long) {
            return ((Long) _o).shortValue();
        }
        if (_o instanceof Double) {
            return ((Double) _o).shortValue();
        }
        if (_o instanceof Boolean) {
            return ((Boolean) _o) ? (short) 1 : (short) 0;
        }
        if (_o instanceof String) {
            return Short.valueOf((String) _o);
        }
        return (short) 0;
    }
    catch (Exception _e) {
        return (short) 0;
    }
}
/** Reads _sField as a BigDecimal; returns zero when the entity is null. */
public static BigDecimal getBigDecimal(DaoEntity _e, String _sField) {
    if (_e == null) {
        return BigDecimal.ZERO;
    }
    return toBigDecimal(_e.get(_sField));
}

/**
 * Coerces an arbitrary value to a BigDecimal: numerics converted, booleans map to 1/0,
 * strings parsed. Returns zero for null, unsupported types, or parse failures.
 */
public static BigDecimal toBigDecimal(Object _o) {
    try {
        if (_o instanceof BigDecimal) {
            return (BigDecimal) _o;
        }
        if (_o instanceof Double) {
            // valueOf uses the double's canonical decimal string; new BigDecimal(double)
            // would expose the inexact binary expansion (e.g. 0.1 -> 0.1000000000000000055...)
            return BigDecimal.valueOf((Double) _o);
        }
        if (_o instanceof Integer) {
            return BigDecimal.valueOf((Integer) _o);
        }
        if (_o instanceof Short) {
            return BigDecimal.valueOf((Short) _o);
        }
        if (_o instanceof Long) {
            return BigDecimal.valueOf((Long) _o);
        }
        if (_o instanceof Boolean) {
            return ((Boolean) _o) ? BigDecimal.ONE : BigDecimal.ZERO;
        }
        if (_o instanceof String) {
            return new BigDecimal((String) _o);
        }
        return BigDecimal.ZERO;
    }
    catch (Exception _e) {
        return BigDecimal.ZERO;
    }
}
/** Reads _sField as a double; returns 0.0 when the entity is null. */
public static double getDouble(DaoEntity _e, String _sField) {
    if (_e == null) {
        return 0.0;
    }
    return toDouble(_e.get(_sField));
}

/**
 * Coerces an arbitrary value to a double: numerics widened/converted, booleans map to
 * 1.0/0.0, strings parsed. Returns 0.0 for null, unsupported types, or parse failures.
 */
public static double toDouble(Object _o) {
    try {
        if (_o instanceof Double) {
            return ((Double) _o).doubleValue();
        }
        if (_o instanceof BigDecimal) {
            return ((BigDecimal) _o).doubleValue();
        }
        if (_o instanceof Integer) {
            return ((Integer) _o).doubleValue();
        }
        if (_o instanceof Short) {
            return ((Short) _o).doubleValue();
        }
        if (_o instanceof Long) {
            return ((Long) _o).doubleValue();
        }
        if (_o instanceof Boolean) {
            return ((Boolean) _o) ? 1.0 : 0.0;
        }
        if (_o instanceof String) {
            return Double.valueOf((String) _o);
        }
        return 0.0;
    }
    catch (Exception _e) {
        return 0.0;
    }
}
/** Reads _sField as a float; returns 0.0f when the entity is null. */
public static float getFloat(DaoEntity _e, String _sField) {
    if (_e == null) {
        return 0.0f;
    }
    return toFloat(_e.get(_sField));
}

/**
 * Coerces an arbitrary value to a float: numerics converted (possibly losing precision),
 * booleans map to 1/0, strings parsed. Returns 0.0f for null, unsupported types, or
 * parse failures.
 */
public static float toFloat(Object _o) {
    try {
        if (_o instanceof Float) {
            return ((Float) _o).floatValue();
        }
        if (_o instanceof Double) {
            return ((Double) _o).floatValue();
        }
        if (_o instanceof BigDecimal) {
            return ((BigDecimal) _o).floatValue();
        }
        if (_o instanceof Integer) {
            return ((Integer) _o).floatValue();
        }
        if (_o instanceof Short) {
            return ((Short) _o).floatValue();
        }
        if (_o instanceof Long) {
            return ((Long) _o).floatValue();
        }
        if (_o instanceof Boolean) {
            return ((Boolean) _o) ? 1.0f : 0.0f;
        }
        if (_o instanceof String) {
            return Float.valueOf((String) _o);
        }
        return 0.0f;
    }
    catch (Exception _e) {
        return 0.0f;
    }
}
/** Reads _sField as an int; returns 0 when the entity is null. */
public static int getInteger(DaoEntity _e, String _sField) {
    if (_e == null) {
        return 0;
    }
    return toInteger(_e.get(_sField));
}

/**
 * Coerces an arbitrary value to an int: numerics truncated/narrowed, booleans map to
 * 1/0, strings parsed. Returns 0 for null, unsupported types, or parse failures.
 */
public static int toInteger(Object _o) {
    try {
        if (_o instanceof Integer) {
            return (Integer) _o;
        }
        if (_o instanceof Short) {
            return ((Short) _o).intValue();
        }
        if (_o instanceof Long) {
            return ((Long) _o).intValue();
        }
        if (_o instanceof Double) {
            return ((Double) _o).intValue();
        }
        if (_o instanceof BigDecimal) {
            return ((BigDecimal) _o).intValue();
        }
        if (_o instanceof Boolean) {
            return ((Boolean) _o) ? 1 : 0;
        }
        if (_o instanceof String) {
            return Integer.valueOf((String) _o);
        }
        return 0;
    }
    catch (Exception _e) {
        return 0;
    }
}
/** Reads _sField as a long; returns 0 when the entity is null. */
public static long getLong(DaoEntity _e, String _sField) {
    if (_e == null) {
        return 0l;
    }
    return toLong(_e.get(_sField));
}

/**
 * Coerces an arbitrary value to a long: numerics truncated/widened, booleans map to
 * 1/0, strings parsed. Returns 0 for null, unsupported types, or parse failures.
 */
public static long toLong(Object _o) {
    try {
        if (_o instanceof Integer) {
            return ((Integer) _o).longValue();
        }
        if (_o instanceof Short) {
            return ((Short) _o).longValue();
        }
        if (_o instanceof Long) {
            return (Long) _o;
        }
        if (_o instanceof BigDecimal) {
            return ((BigDecimal) _o).longValue();
        }
        if (_o instanceof Double) {
            return ((Double) _o).longValue();
        }
        if (_o instanceof Boolean) {
            return ((Boolean) _o) ? 1L : 0L;
        }
        if (_o instanceof String) {
            return Long.valueOf((String) _o);
        }
        return 0L;
    }
    catch (Exception _e) {
        return 0L;
    }
}
/** Reads _field as a boolean; returns false when the entity is null. */
public static boolean getBoolean(DaoEntity _e, String _field) {
    if (_e == null) {
        return false;
    }
    return toBoolean(_e.get(_field));
}

/** Coerces an arbitrary value to a boolean, defaulting to false. */
public static boolean toBoolean(Object _o) {
    return toBoolean(_o, false);
}

/**
 * Coerces an arbitrary value to a boolean: Boolean passthrough, the strings
 * "true" (case-insensitive) or "1", and non-zero numerics are true.
 * Returns _default for null or unsupported types.
 */
public static boolean toBoolean(Object _o, boolean _default) {
    if (_o instanceof Boolean)
        return (Boolean) _o;
    if (_o instanceof String)
        return ((String) _o).equalsIgnoreCase("true") || _o.equals("1");
    if (_o instanceof Integer)
        return ((Integer) _o) != 0;
    if (_o instanceof Long)
        return ((Long) _o) != 0;
    if (_o instanceof BigDecimal)
        // compareTo, not equals: BigDecimal.equals is scale-sensitive, so the previous
        // equals(BigDecimal.ZERO) check wrongly treated values like 0.00 as true
        return ((BigDecimal) _o).compareTo(BigDecimal.ZERO) != 0;
    return _default;
}
/** Reads _sField as a byte array, unwrapping BSON Binary values; null when absent or not binary. */
public static byte[] getByteArray(DaoEntity _e, String _sField) {
    if (_e == null) {
        return null;
    }
    Object o = _e.get(_sField);
    if (o instanceof Binary)
        return ((Binary)o).getData();
    if (o instanceof byte[])
        return (byte[]) o;
    return null;
}

/** Returns the enum's name, or "" for null, so enums serialize as plain strings. */
public static String toEnumName(Enum<?> _enum) {
    if (_enum == null)
        return "";
    return _enum.name();
}

/** Converts a collection of enums to their names (null elements become ""). */
public static <T extends Enum<T>> List<String> toEnumNames(Collection<T> _enums) {
    return CollectionUtils.transform(_enums, new ITransformer<T, String>() {
        @Override
        public String transform(T _enum) {
            return toEnumName(_enum);
        }
    });
}

/** Reads _sField as an enum constant of _enumType; null when the stored name does not match. */
public static <T extends Enum<T>> T getEnum(DaoEntity _e, String _sField, Class<T> _enumType) {
    return NullUtils.toEnum(_enumType, getString(_e, _sField));
}

/** Reads _sField as an enum constant of _enumType, falling back to _default. */
public static <T extends Enum<T>> T getEnum(DaoEntity _e, String _sField, Class<T> _enumType, T _default) {
    return NullUtils.toEnum(_enumType, getString(_e, _sField), _default);
}

/** Converts a collection of enum names back to enum constants of _enumType. */
public static <T extends Enum<T>> List<T> toEnums(Collection<String> _enumNames, final Class<T> _enumType) {
    return CollectionUtils.transform(_enumNames, new ITransformer<String, T>() {
        @Override
        public T transform(String _s) {
            return NullUtils.toEnum(_enumType, _s);
        }
    });
}
/** Reads _field as a nested DaoEntity; null when absent or not an entity/document. */
public static DaoEntity getDaoEntity(DaoEntity _e, String _field) {
    if (_e == null)
        return null;
    return asDaoEntity(_e.get(_field));
}

/** Views _o as a DaoEntity (wrapping Documents); null for anything else. */
public static DaoEntity asDaoEntity(Object _o) {
    if (_o instanceof Document)
        return new DaoEntity((Document) _o);
    if (_o instanceof DaoEntity)
        return (DaoEntity) _o;
    return null;
}

/** Reads the nested entity at _field and deserializes it into _class. */
public static <T> T getObject(DaoEntity _e, String _field, Class<T> _class) {
    return getObject(_e, _field, _class, null);
}

/** Reads the nested entity at _field and deserializes it into _class using _proxyType's serializer. */
public static <T> T getObject(DaoEntity _e, String _field, Class<T> _class, DaoProxyType _proxyType) {
    return fromDaoEntity(getDaoEntity(_e, _field), _class, _proxyType);
}

/** Serializes each object with the default serializer; null-safe on the collection. */
public static List<DaoEntity> toDaoEntities(Collection<? extends Object> _objects) {
    return toDaoEntities(_objects, null);
}

/**
 * Serializes each object preferring _proxyType's serializer. Elements that cannot be
 * serialized contribute null entries to the returned list.
 */
public static List<DaoEntity> toDaoEntities(Collection<? extends Object> _objects, DaoProxyType _proxyType) {
    List<DaoEntity> entities = new ArrayList<>(CollectionUtils.size(_objects));
    for (Object o : CollectionUtils.makeNotNull(_objects)) {
        entities.add(toDaoEntity(o, _proxyType));
    }
    return entities;
}

/** Reads _field as a list of entities; empty list when absent or not a collection. */
public static List<DaoEntity> getDaoEntityList(DaoEntity _d, String _field) {
    return getDaoEntityList(_d, _field, null);
}

/** Reads _field as a list of entities using _proxyType's serializer; empty list otherwise. */
public static List<DaoEntity> getDaoEntityList(DaoEntity _d, String _field, DaoProxyType _proxyType) {
    Object list = (_d == null) ? null : _d.get(_field);
    if (list instanceof Collection)
        return toDaoEntities((Collection<?>) list, _proxyType);
    return new ArrayList<>();
}
/** Reads _sField as a list of _classOfT using the default serializer. */
public static <T> List<T> getList(DaoEntity _d, String _sField, Class<T> _classOfT) {
    return getList(_d, _sField, _classOfT, null);
}

/** Reads _sField as a list of _classOfT; returns an empty mutable list when absent. */
public static <T> List<T> getList(DaoEntity _d, String _sField, Class<T> _classOfT, DaoProxyType _proxyType) {
    if ((_d == null) || (!_d.containsKey(_sField)))
        return new ArrayList<T>();
    return fromList(_d.get(_sField), _classOfT, _proxyType);
}

/** Converts an arbitrary object, if it is a List, into a list of _classOfT. */
public static <T> List<T> fromList(Object _list, Class<T> _classOfT) {
    return fromList(_list, _classOfT, null);
}

/** Narrows _list to a List before converting; non-list inputs yield an empty list. */
public static <T> List<T> fromList(Object _list, Class<T> _classOfT, DaoProxyType _proxyType) {
    if (_list instanceof List)
        return fromList((List<?>) _list, _classOfT, _proxyType);
    return new ArrayList<>();
}

/** Converts each element of _list to _classOfT using the default serializer. */
public static <T> List<T> fromList(List<?> _list, Class<T> _classOfT) {
    return fromList(_list, _classOfT, null);
}

/**
 * Converts each element to _classOfT: direct instances are cast, Document/DaoEntity
 * elements are deserialized, and elements of any other type are silently skipped.
 */
public static <T> List<T> fromList(List<?> _list, Class<T> _classOfT, DaoProxyType _proxyType) {
    List<T> objects = new ArrayList<>(CollectionUtils.size(_list));
    for (Object object : CollectionUtils.makeNotNull(_list)) {
        if (_classOfT.isInstance(object))
            objects.add(_classOfT.cast(object));
        else if (object instanceof Document)
            objects.add(fromDaoEntity(new DaoEntity((Document) object), _classOfT, _proxyType));
        else if (object instanceof DaoEntity)
            objects.add(fromDaoEntity((DaoEntity) object, _classOfT, _proxyType));
    }
    return objects;
}
/** Returns the serializable fields of _class and its superclasses (with _serializeObjects=false). */
public static List<Field> getSerializableFields(Class<?> _class) {
    return getSerializableFields(_class, false);
}

/**
 * Collects the fields AbstractDaoSerializer.isSerializable accepts, walking the entire
 * class hierarchy. The _serializeObjects flag is forwarded to that check.
 */
public static List<Field> getSerializableFields(Class<?> _class, boolean _serializeObjects) {
    List<Field> fields = new ArrayList<Field>();
    addSerializableFields(_class, fields, _serializeObjects);
    return fields;
}

/** Recursively appends the serializable declared fields of _class and its superclasses to _fields. */
private static void addSerializableFields(Class<?> _class, List<Field> _fields, boolean _serializeObjects) {
    if (_class == null) {
        return;
    }
    for (Field field : _class.getDeclaredFields()) {
        if (AbstractDaoSerializer.isSerializable(field, _serializeObjects))
            _fields.add(field);
    }
    addSerializableFields(_class.getSuperclass(), _fields, _serializeObjects);
}
/** Returns all indexed column names declared via the class's DBSerializable annotation. */
public static Set<String> getIndexedFields(Class<?> _class) {
    return getIndexedFields(_class, null);
}

/** Returns only columns belonging to hash indexes. */
public static Set<String> getHashIndexFields(Class<?> _class) {
    return getIndexedFields(_class, true);
}

/** Returns only columns belonging to range (non-hash) indexes. */
public static Set<String> getRangeIndexFields(Class<?> _class) {
    return getIndexedFields(_class, false);
}

/**
 * Collects index columns from the DBSerializable annotation directly on _class
 * (superclasses are not consulted); _hash null means both index kinds.
 */
private static Set<String> getIndexedFields(Class<?> _class, Boolean _hash) {
    Set<String> indexedFields = new HashSet<String>();
    DBSerializable def = _class.getAnnotation(DBSerializable.class);
    if (def != null) {
        for (DBIndex index : def.indexes()) {
            if ((_hash == null) || (_hash == index.hash())) {
                Collections.addAll(indexedFields, index.columns());
            }
        }
    }
    return indexedFields;
}
/** Returns true when _fieldAnnotation is present on _class or any of its superclasses. */
public static boolean isAnnotationPresent(Class<?> _class, Class<? extends Annotation> _fieldAnnotation) {
    for (Class<?> current = _class; current != null; current = current.getSuperclass()) {
        if (current.isAnnotationPresent(_fieldAnnotation))
            return true;
    }
    return false;
}

/** Returns the first _fieldAnnotation found walking up the class hierarchy, or null. */
public static <T extends Annotation> T getAnnotation(Class<?> _class, Class<T> _fieldAnnotation) {
    for (Class<?> current = _class; current != null; current = current.getSuperclass()) {
        T annotation = current.getAnnotation(_fieldAnnotation);
        if (annotation != null)
            return annotation;
    }
    return null;
}
/** Serializes _o to pretty-printed JSON with null values removed. */
public static String toJson(Object _o) {
    return toJson(_o, true);
}

/** Serializes _o to single-line JSON with null values removed. */
public static String toSingleLineJson(Object _o) {
    return toJson(toDaoEntity(_o), true, false);
}

/** Serializes _e to single-line JSON with null values removed. */
public static String toSingleLineJson(DaoEntity _e) {
    return toJson(_e, true, false);
}

/** Serializes _o to BSON (nulls removed) and compresses the result via ZipUtils. */
public static byte[] toZipBson(Object _o) {
    return toZipBson(toDaoEntity(_o));
}

/** Encodes _entity to BSON with nulls removed, then compresses via ZipUtils. */
public static byte[] toZipBson(DaoEntity _entity) {
    if (_entity == null)
        return null;
    return ZipUtils.zip(toBson(_entity, true));
}

/** Decompresses zipped BSON and deserializes it into an instance of _class. */
public static <T> T fromZipBson(byte[] _btZipBson, Class<T> _class) {
    return DaoSerializer.fromDaoEntity(fromZipBson(_btZipBson), _class);
}

/** Decompresses zipped BSON and decodes it into a DaoEntity. */
public static DaoEntity fromZipBson(byte[] _btZipBson) {
    return fromBson(ZipUtils.unzip(_btZipBson));
}

/** Decodes BSON bytes and deserializes them into an instance of _class. */
public static <T> T fromBson(byte[] _btBson, Class<T> _class) {
    return fromDaoEntity(fromBson(_btBson), _class);
}
/**
 * Decodes little-endian BSON bytes into a DaoEntity using the Mongo DocumentCodec.
 * Returns null for null input or any decoding failure (logged); always closes the reader.
 */
public static DaoEntity fromBson(byte[] _btBson)
{
    if (_btBson == null)
        return null;
    BsonBinaryReader reader = null;
    try
    {
        // BSON is specified as little-endian, regardless of platform byte order
        reader = new BsonBinaryReader(ByteBuffer.wrap(_btBson).order(ByteOrder.LITTLE_ENDIAN));
        Document doc = new DocumentCodec().decode(reader, DecoderContext.builder().build());
        if (doc == null)
            return null;
        return new DaoEntity(doc);
    }
    catch (Throwable t)
    {
        LOG.error("Failed to convert bson to DaoEntity", t);
        return null;
    }
    finally
    {
        if (reader != null)
            reader.close();
    }
}
/** Serializes _o to BSON via its registered serializer; null when unserializable. */
public static byte[] toBson(Object _o) {
    return toBson(toDaoEntity(_o));
}

/**
 * Serializes _o to BSON, optionally stripping null values (and collections emptied by
 * that stripping) before encoding. Returns null when _o cannot be serialized.
 */
public static byte[] toBson(Object _o, boolean _removeNulls) {
    DaoEntity entity = toDaoEntity(_o);
    // toDaoEntity returns null for null input or a missing serializer; the previous
    // code dereferenced entity.values() unconditionally and threw an NPE in that case
    if (entity == null)
        return null;
    if (_removeNulls)
        removeNulls(entity.values());
    return toBson(entity);
}

/** Encodes _entity to BSON bytes; null on null input or encoding failure (logged). */
public static byte[] toBson(DaoEntity _entity) {
    if (_entity == null)
        return null;
    BsonBinaryWriter writer = null;
    try
    {
        BasicOutputBuffer buffer = new BasicOutputBuffer();
        writer = new BsonBinaryWriter(buffer);
        new DocumentCodec().encode(writer, _entity.toDocument(), EncoderContext.builder().build());
        return buffer.toByteArray();
    }
    catch (Throwable t)
    {
        LOG.error("Failed to convert entity to BSON", t);
        return null;
    }
    finally
    {
        if (writer != null)
            writer.close();
    }
}
/** Serializes _o to pretty-printed JSON, optionally removing null values. */
public static String toJson(Object _o, boolean _removeNulls) {
    return toJson(toDaoEntity(_o), _removeNulls);
}

/** Serializes _e to pretty-printed JSON with null values removed. */
public static String toJson(DaoEntity _e) {
    return toJson(_e, true);
}

/** Serializes _e to pretty-printed JSON, optionally removing null values. */
public static String toJson(DaoEntity _e, boolean _removeNulls) {
    return toJson(_e, _removeNulls, true);
}

/**
 * Serializes _e to JSON. Int64 values are written as plain numeric literals via a
 * custom converter (overriding the driver's default representation). Returns null
 * for null input or any conversion failure (logged).
 */
public static String toJson(DaoEntity _e, boolean _removeNulls, boolean _pretty) {
    try {
        if (_e != null) {
            Document doc = _e.toDocument();
            if (_removeNulls)
                removeNulls(doc.values());
            JsonWriterSettings.Builder settings = JsonWriterSettings.builder().int64Converter(new Converter<Long>() {
                @Override
                public void convert(Long _long, StrictJsonWriter _writer) {
                    if (_long != null)
                        _writer.writeNumber(_long.toString());
                }
            });
            return doc.toJson(settings.indent(_pretty).build());
        }
    }
    catch (Exception e) {
        LOG.error("Failed to convert DaoEntity to json", e);
    }
    return null;
}
/**
 * Recursively removes null values from a document's value collection, descending into
 * nested DaoEntity/Document values and collections. Nested collections that become
 * empty after stripping are removed from their parent as well.
 */
private static void removeNulls(Collection<Object> _doc) {
    if (_doc == null)
        return;
    Iterator<Object> values = _doc.iterator();
    while (values.hasNext()) {
        Object o = values.next();
        if (o == null)
            values.remove();
        else if (o instanceof DaoEntity)
            removeNulls(((DaoEntity) o).values());
        else if (o instanceof Document)
            removeNulls(((Document) o).values());
        else if (o instanceof Collection) {
            // unchecked view: the collection is only read and pruned, never written to
            Collection<Object> entities = (Collection<Object>) o;
            removeNulls(entities);
            if (entities.isEmpty())
                values.remove();
        }
    }
}
/**
 * Renders the entities as a JSON array, one serialized entity per element.
 * Returns null (not "[]") when the collection is null or empty.
 */
public static String toJson(Collection<DaoEntity> _entities) {
    Collection<DaoEntity> entities = CollectionUtils.makeNotNull(_entities);
    if (entities.isEmpty())
        return null;
    StringBuilder json = new StringBuilder("[");
    boolean first = true;
    for (DaoEntity entity : entities) {
        if (!first)
            json.append(",");
        first = false;
        json.append(toJson(entity));
    }
    return json.append("]").toString();
}
/** Parses JSON bytes (decoded via NullUtils.toString — presumably UTF-8; confirm) into _class. */
public static <T> T parse(byte[] _json, Class<T> _class) {
    return fromDaoEntity(parse(NullUtils.toString(_json)), _class);
}

/** Parses a JSON object string and deserializes it into an instance of _class. */
public static <T> T parse(String _json, Class<T> _class) {
    return fromDaoEntity(parse(_json), _class);
}

/** Parses a JSON object string into a DaoEntity; null on empty input or parse failure (logged). */
public static DaoEntity parse(String _json) {
    if (NullUtils.isEmpty(_json))
        return null;
    try {
        return new DaoEntity(Document.parse(_json));
    }
    catch (Exception _e) {
        LOG.error("Failed to parse json", _e);
        return null;
    }
}

/** Parses a JSON array string and deserializes its object elements into _class instances. */
public static <T> List<T> parseList(String _json, Class<T> _class) {
    return fromList(parseList(_json), _class);
}

/**
 * Parses a JSON array string into DaoEntities. Non-object elements are skipped.
 * Returns null (not an empty list) on parse failure (logged).
 */
public static List<DaoEntity> parseList(String _json) {
    try {
        List<DaoEntity> entities = new ArrayList<>();
        JsonReader bsonReader = new JsonReader(_json);
        for (Object o : new IterableCodec(CodecRegistries.fromProviders(Arrays.asList(new ValueCodecProvider(), new BsonValueCodecProvider(), new DocumentCodecProvider())), new BsonTypeClassMap()).decode(bsonReader, DecoderContext.builder().build())) {
            if (o instanceof Document)
                entities.add(new DaoEntity((Document) o));
        }
        return entities;
    }
    catch (Exception _e) {
        LOG.error("Failed to parse json", _e);
        return null;
    }
}
/**
 * Returns the index definitions declared by the serializer registered for _class,
 * or an empty list when no serializer exists. Declared static for consistency with
 * the rest of this utility class — it uses no instance state (existing instance-style
 * call sites remain source-compatible).
 */
public static List<DaoSort> getIndexes(Class<?> _class) {
    IDaoSerializer<?> serializer = getSerializer(_class);
    return (serializer == null) ? new ArrayList<DaoSort>() : serializer.getIndexes();
}
}

View File

@@ -0,0 +1,15 @@
package com.lanternsoftware.util.dao;
/**
 * Unchecked exception thrown when DAO serialization or deserialization fails.
 * Mirrors the four standard RuntimeException constructors.
 */
public class DaoSerializerException extends RuntimeException {
    /** Creates an exception with no detail message and no cause. */
    public DaoSerializerException() {
        super();
    }

    /** Creates an exception with the given detail message. */
    public DaoSerializerException(String message) {
        super(message);
    }

    /** Creates an exception wrapping the given cause. */
    public DaoSerializerException(Throwable cause) {
        super(cause);
    }

    /** Creates an exception with the given detail message and cause. */
    public DaoSerializerException(String message, Throwable cause) {
        super(message, cause);
    }
}

View File

@@ -0,0 +1,72 @@
package com.lanternsoftware.util.dao;
import java.util.ArrayList;
import java.util.List;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.ITransformer;
import com.lanternsoftware.util.dao.annotations.CaseFormat;
/**
 * An ordered list of sort directives for DAO queries, built fluently via the static
 * sort* factories followed by chained then* calls.
 */
public class DaoSort {
    private final List<DaoSortField> fields = new ArrayList<DaoSortField>();

    /** Appends an ascending, case-sensitive sort on _field. */
    public DaoSort then(String _field) {
        return then(_field, true);
    }

    /** Appends an ascending, case-insensitive sort on _field. */
    public DaoSort thenIgnoreCase(String _field) {
        return then(_field, true, true);
    }

    /** Appends a descending, case-sensitive sort on _field. */
    public DaoSort thenDesc(String _field) {
        return then(_field, false);
    }

    /** Appends a descending, case-insensitive sort on _field. */
    public DaoSort thenDescIgnoreCase(String _field) {
        return then(_field, false, true);
    }

    /** Appends a case-sensitive sort on _field in the given direction. */
    public DaoSort then(String _field, boolean _ascending) {
        return then(_field, _ascending, false);
    }

    /** Appends a sort on _field with explicit direction and case sensitivity. */
    public DaoSort then(String _field, boolean _ascending, boolean _ignoreCase) {
        // Fixed: this previously passed a hard-coded false, silently discarding the
        // _ignoreCase flag supplied by thenIgnoreCase()/thenDescIgnoreCase()
        fields.add(new DaoSortField(_field, _ascending, _ignoreCase));
        return this;
    }

    /** Starts an ascending, case-sensitive sort on _field. */
    public static DaoSort sort(String _field) {
        return new DaoSort().then(_field);
    }

    /** Starts an ascending, case-insensitive sort on _field. */
    public static DaoSort sortIgnoreCase(String _field) {
        return new DaoSort().thenIgnoreCase(_field);
    }

    /** Starts a descending, case-sensitive sort on _field. */
    public static DaoSort sortDesc(String _field) {
        return new DaoSort().thenDesc(_field);
    }

    /** Starts a descending, case-insensitive sort on _field. */
    public static DaoSort sortDescIgnoreCase(String _field) {
        return new DaoSort().thenDescIgnoreCase(_field);
    }

    /** Returns the live (mutable) list of sort directives in application order. */
    public List<DaoSortField> getFields() {
        return fields;
    }

    /** Builds a sort from "field[,desc]" query parameters using camel->snake conversion. */
    public static DaoSort fromQueryParams(List<String> _queryParams) {
        return fromQueryParams(_queryParams, CaseFormat.CAMEL, CaseFormat.SNAKE);
    }

    /** Builds a sort from "field[,desc]" query parameters, converting field-name case formats. */
    public static DaoSort fromQueryParams(List<String> _queryParams, final CaseFormat _paramFormat, final CaseFormat _dbFormat) {
        DaoSort sort = new DaoSort();
        sort.fields.addAll(CollectionUtils.transform(_queryParams, new ITransformer<String, DaoSortField>() {
            @Override
            public DaoSortField transform(String _s) {
                return DaoSortField.fromQueryParam(_s, _paramFormat, _dbFormat);
            }
        }));
        return sort;
    }
}

View File

@@ -0,0 +1,53 @@
package com.lanternsoftware.util.dao;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.dao.annotations.CaseFormat;
/**
 * A single sort directive: a field name, a direction, and a case-sensitivity flag.
 */
public class DaoSortField {
    private String field;
    private boolean ascending;
    private boolean ignoreCase;

    public DaoSortField() {
    }

    public DaoSortField(String _field, boolean _ascending, boolean _ignoreCase) {
        field = _field;
        ascending = _ascending;
        // Fixed: _ignoreCase was previously never assigned, so ignoreCase was always
        // false regardless of what callers (e.g. DaoSort.thenIgnoreCase) requested
        ignoreCase = _ignoreCase;
    }

    public String getField() {
        return field;
    }

    public void setField(String _field) {
        field = _field;
    }

    public boolean isAscending() {
        return ascending;
    }

    public void setAscending(boolean _ascending) {
        ascending = _ascending;
    }

    public boolean isIgnoreCase() {
        return ignoreCase;
    }

    public void setIgnoreCase(boolean _ignoreCase) {
        ignoreCase = _ignoreCase;
    }

    /** Parses a "field[,desc]" query parameter using default camel->snake case conversion. */
    public static DaoSortField fromQueryParam(String _param) {
        return fromQueryParam(_param, CaseFormat.CAMEL, CaseFormat.SNAKE);
    }

    /**
     * Parses a "field[,desc]" query parameter, converting the field name between the given
     * case formats. Returns null for an empty parameter; the resulting sort is case-sensitive.
     */
    public static DaoSortField fromQueryParam(String _param, CaseFormat _paramFormat, CaseFormat _dbFormat) {
        if (NullUtils.isEmpty(_param))
            return null;
        String[] parts = _param.split(",");
        return new DaoSortField(AbstractDaoSerializer.convertCase(parts[0], _paramFormat, _dbFormat), !(parts.length > 1 && NullUtils.isEqual(parts[1], "desc")), false);
    }
}

View File

@@ -0,0 +1,16 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import com.lanternsoftware.util.CollectionUtils;
/**
 * Base class for hooks that mutate DaoEntity instances in place before they are used
 * (presumably prior to persistence by a proxy implementation — confirm against callers).
 */
public abstract class EntityPreparer {
    /** Applies prepareEntity to each element (null-safe) and returns the same collection. */
    public Collection<DaoEntity> prepareEntities(Collection<DaoEntity> _entities) {
        for (DaoEntity entity : CollectionUtils.makeNotNull(_entities)) {
            prepareEntity(entity);
        }
        return _entities;
    }

    /** Mutates a single entity in place. */
    public abstract void prepareEntity(DaoEntity _entity);
}

View File

@@ -0,0 +1,66 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
/**
 * Datastore-agnostic persistence facade: typed queries (synchronous and asynchronous),
 * raw DaoEntity queries by table name, saves, updates, deletes, counts, and existence
 * checks. One implementation exists per backing store type (see DaoProxyType).
 */
public interface IDaoProxy {
    /** Releases resources held by this proxy. */
    void shutdown();
    /** Identifies the backing store this proxy targets. */
    DaoProxyType getType();
    // --- typed queries, synchronous; null _fields/_sort are passed through to the implementation ---
    <T> List<T> queryAll(Class<T> _class);
    <T> T queryOne(Class<T> _class, DaoQuery _query);
    <T> T queryOne(Class<T> _class, DaoQuery _query, DaoSort _sort);
    <T> T queryOne(Class<T> _class, DaoQuery _query, Collection<String> _fields);
    <T> T queryOne(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort);
    // --- typed queries, asynchronous (run on the executor set via setExecutor) ---
    <T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query);
    <T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query, DaoSort _sort);
    <T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields);
    <T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort);
    <T> List<T> query(Class<T> _class, DaoQuery _query);
    <T> List<T> query(Class<T> _class, DaoQuery _query, DaoSort _sort);
    <T> List<T> query(Class<T> _class, DaoQuery _query, Collection<String> _fields);
    <T> List<T> query(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort);
    <T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query);
    <T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query, DaoSort _sort);
    <T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields);
    <T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort);
    /** Runs a query, then transforms the results from T to V via the finalizer (see QueryFinalizerExecution). */
    <T, V> Future<List<V>> queryWithFinalizer(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, QueryFinalizer<T, V> _finalizer);
    // --- paged queries ---
    <T> List<T> query(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count);
    <T> DaoPage<T> queryPage(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count);
    // --- queries restricted to the serializer's "important" fields (see IDaoSerializer.getImportantFields) ---
    <T> List<T> queryImportant(Class<T> _class, DaoQuery _query);
    <T> List<T> queryImportant(Class<T> _class, DaoQuery _query, DaoSort _sort);
    <T> Future<List<T>> queryImportantAsync(Class<T> _class, DaoQuery _query);
    <T> Future<List<T>> queryImportantAsync(Class<T> _class, DaoQuery _query, DaoSort _sort);
    <T> List<T> queryImportant(Class<T> _class, DaoQuery _query, DaoSort _sort, int _offset, int _count);
    <T> DaoPage<T> queryImportantPage(Class<T> _class, DaoQuery _query, DaoSort _sort, int _offset, int _count);
    // --- raw DaoEntity queries addressed by table/collection name ---
    DaoEntity queryForEntity(String _tableName, DaoQuery _query);
    DaoEntity queryForEntity(String _tableName, DaoQuery _query, DaoSort _sort);
    DaoEntity queryForEntity(String _tableName, DaoQuery _query, Collection<String> _fields);
    DaoEntity queryForEntity(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort);
    DaoPage<DaoEntity> queryForEntitiesPage(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count);
    List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query);
    List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, DaoSort _sort);
    List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, Collection<String> _fields);
    List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort);
    List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count);
    // --- single-field projections ---
    String queryForOneField(Class<?> _class, DaoQuery _query, String _field);
    List<String> queryForField(Class<?> _class, DaoQuery _query, String _field);
    List<String> queryForField(Class<?> _class, DaoQuery _query, String _field, DaoSort _sort);
    List<String> queryForField(String _tableName, DaoQuery _query, String _field);
    // --- writes; String returns are presumably the saved entity's primary key (confirm per impl) ---
    String save(Object _object);
    <T> Map<String, T> save(Collection<T> _objects);
    Map<String, DaoEntity> save(Class<?> _class, Collection<DaoEntity> _entities);
    void update(Class<?> _class, DaoQuery _query, DaoEntity _changes);
    <T> T updateOne(Class<T> _class, DaoQuery _query, DaoEntity _changes);
    String saveEntity(Class<?> _class, DaoEntity _entity);
    String saveEntity(String _collection, DaoEntity _entity);
    boolean delete(Class<?> _class, DaoQuery _query);
    boolean delete(String _tableName, DaoQuery _query);
    // --- aggregates ---
    int count(Class<?> _class, DaoQuery _query);
    int count(String _tableName, DaoQuery _query);
    boolean exists(Class<?> _class, DaoQuery _query);
    boolean exists(String _tableName, DaoQuery _query);
    /** Supplies the executor used by the *Async methods. */
    void setExecutor(ExecutorService _executor);
}

View File

@@ -0,0 +1,16 @@
package com.lanternsoftware.util.dao;
import java.lang.annotation.Annotation;
import java.util.List;
/**
 * Converts between a domain type T and the generic DaoEntity representation.
 * Implementations are discovered via ServiceLoader (see DaoSerializer's static
 * initializer) or registered explicitly via DaoSerializer.addSerializer.
 */
public interface IDaoSerializer<T> {
    /** The domain class this serializer handles. */
    Class<T> getSupportedClass();
    /** The table/collection this type is persisted to. */
    String getTableName();
    /** Field names carrying the given annotation (e.g. PrimaryKey). */
    List<String> getFieldsByAnnotation(Class<? extends Annotation> _fieldAnnotation);
    /** Field names included in "important"-only queries (see IDaoProxy.queryImportant). */
    List<String> getImportantFields();
    /** The java.sql.Types constant for _fieldName, used by JDBC-backed proxies. */
    int getSqlType(String _fieldName);
    /** Serializes _t into a DaoEntity. */
    DaoEntity toDaoEntity(T _t);
    /** Deserializes _entity back into a T. */
    T fromDaoEntity(DaoEntity _entity);
    /** Proxy types this serializer supports, used for proxy-specific selection. */
    List<DaoProxyType> getSupportedProxies();
    /** Index definitions for the backing table/collection. */
    List<DaoSort> getIndexes();
}

View File

@@ -0,0 +1,38 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
/**
 * Callable that binds a typed IDaoProxy query (class, query, optional field projection
 * and sort) so it can be submitted to an ExecutorService and executed later.
 * Null _fields/_sort are passed through to the proxy unchanged.
 */
public class QueryExecution<V> implements Callable<List<V>> {
    private final IDaoProxy proxy;
    private final Class<V> clazz;
    private final DaoQuery query;
    private final Collection<String> fields;
    private final DaoSort sort;

    /** Binds a query with no field projection and no sort. */
    public QueryExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query) {
        this(_proxy, _class, _query, null, null);
    }

    /** Binds a sorted query with no field projection. */
    public QueryExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query, DaoSort _sort) {
        this(_proxy, _class, _query, null, _sort);
    }

    /** Binds a projected query with no sort. */
    public QueryExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query, Collection<String> _fields) {
        this(_proxy, _class, _query, _fields, null);
    }

    /** Binds a fully-specified query. */
    public QueryExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
        proxy = _proxy;
        clazz = _class;
        query = _query;
        fields = _fields;
        sort = _sort;
    }

    /** Runs the bound query against the proxy and returns the typed results. */
    @Override
    public List<V> call() throws Exception {
        return proxy.query(clazz, query, fields, sort);
    }
}

View File

@@ -0,0 +1,7 @@
package com.lanternsoftware.util.dao;
import java.util.List;
/**
 * Transforms query results from type I to type O with access to the owning proxy
 * (e.g. for follow-up lookups — see QueryFinalizerExecution and
 * IDaoProxy.queryWithFinalizer).
 * NOTE(review): finalize(IDaoProxy, List) overloads — it does not override —
 * Object.finalize(); consider renaming if the API ever allows it.
 */
public abstract class QueryFinalizer<I, O> {
    public abstract List<O> finalize(IDaoProxy _proxy, List<I> _input);
}

View File

@@ -0,0 +1,28 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
/**
 * Callable that runs a typed query and then applies a QueryFinalizer to transform the
 * results from I to O before returning them; suitable for ExecutorService submission.
 */
public class QueryFinalizerExecution<I,O> implements Callable<List<O>> {
    private final IDaoProxy proxy;
    private final Class<I> clazz;
    private final DaoQuery query;
    private final Collection<String> fields;
    private final DaoSort sort;
    private final QueryFinalizer<I, O> finalizer;

    /** Binds the query parameters and the finalizer to apply to its results. */
    public QueryFinalizerExecution(IDaoProxy _proxy, Class<I> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, QueryFinalizer<I,O> _finalizer) {
        proxy = _proxy;
        clazz = _class;
        query = _query;
        fields = _fields;
        sort = _sort;
        finalizer = _finalizer;
    }

    /** Runs the bound query, then passes the results (and the proxy) to the finalizer. */
    @Override
    public List<O> call() {
        return finalizer.finalize(proxy, proxy.query(clazz, query, fields, sort));
    }
}

View File

@@ -0,0 +1,37 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import java.util.concurrent.Callable;
/**
 * Callable that binds a single-result IDaoProxy query (class, query, optional field
 * projection and sort) for later execution on an ExecutorService.
 * Null _fields/_sort are passed through to the proxy unchanged.
 */
public class QueryOneExecution<V> implements Callable<V> {
    private final IDaoProxy proxy;
    private final Class<V> clazz;
    private final DaoQuery query;
    private final Collection<String> fields;
    private final DaoSort sort;

    /** Binds a query with no field projection and no sort. */
    public QueryOneExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query) {
        this(_proxy, _class, _query, null, null);
    }

    /** Binds a sorted query with no field projection. */
    public QueryOneExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query, DaoSort _sort) {
        this(_proxy, _class, _query, null, _sort);
    }

    /** Binds a projected query with no sort. */
    public QueryOneExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query, Collection<String> _fields) {
        this(_proxy, _class, _query, _fields, null);
    }

    /** Binds a fully-specified query. */
    public QueryOneExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
        proxy = _proxy;
        clazz = _class;
        query = _query;
        fields = _fields;
        sort = _sort;
    }

    /** Runs the bound query against the proxy and returns its single result. */
    @Override
    public V call() throws Exception {
        return proxy.queryOne(clazz, query, fields, sort);
    }
}

View File

@@ -0,0 +1,50 @@
package com.lanternsoftware.util.dao;
import java.util.Map;
import java.util.Map.Entry;
import com.lanternsoftware.util.dao.annotations.CaseFormat;
/**
 * Rewrites the keys of a {@link DaoQuery} from the caller's naming convention to the
 * database's: exact-match replacements are applied first, then a case-format
 * conversion, then suffix substitutions.
 */
public class QueryPreparer {
	private final CaseFormat queryCaseFormat;
	private final CaseFormat dbCaseFormat;
	private final Map<String, String> fieldReplacements;
	private final Map<String, String> fieldSuffixExceptions;

	public QueryPreparer(CaseFormat _queryCaseFormat, CaseFormat _dbCaseFormat) {
		this(_queryCaseFormat, _dbCaseFormat, null);
	}

	public QueryPreparer(CaseFormat _queryCaseFormat, CaseFormat _dbCaseFormat, Map<String, String> _fieldReplacements) {
		this(_queryCaseFormat, _dbCaseFormat, _fieldReplacements, null);
	}

	/**
	 * @param _queryCaseFormat the case format used by incoming query keys
	 * @param _dbCaseFormat the case format used by the database
	 * @param _fieldReplacements exact key replacements that bypass case conversion entirely (may be null)
	 * @param _fieldSuffixExceptions suffix rewrites applied after case conversion (may be null)
	 */
	public QueryPreparer(CaseFormat _queryCaseFormat, CaseFormat _dbCaseFormat, Map<String, String> _fieldReplacements, Map<String, String> _fieldSuffixExceptions) {
		this.queryCaseFormat = _queryCaseFormat;
		this.dbCaseFormat = _dbCaseFormat;
		this.fieldReplacements = _fieldReplacements;
		this.fieldSuffixExceptions = _fieldSuffixExceptions;
	}

	/** Returns a new query whose keys have been translated to database naming; values are untouched. */
	public DaoQuery prepareQuery(DaoQuery _query) {
		DaoQuery prepared = new DaoQuery();
		for (Entry<String, Object> entry : _query.entrySet()) {
			String key = entry.getKey();
			// An exact replacement short-circuits all other rewriting.
			if (fieldReplacements != null) {
				String replacement = fieldReplacements.get(key);
				if (replacement != null) {
					prepared.put(replacement, entry.getValue());
					continue;
				}
			}
			String dbField = AbstractDaoSerializer.convertCase(key, queryCaseFormat, dbCaseFormat);
			// Suffix exceptions are applied in map iteration order; multiple rules may chain.
			if (fieldSuffixExceptions != null) {
				for (Entry<String, String> suffix : fieldSuffixExceptions.entrySet()) {
					if (dbField.endsWith(suffix.getKey()))
						dbField = dbField.substring(0, dbField.length() - suffix.getKey().length()) + suffix.getValue();
				}
			}
			prepared.put(dbField, entry.getValue());
		}
		return prepared;
	}
}

View File

@@ -0,0 +1,7 @@
package com.lanternsoftware.util.dao.annotations;
/** Naming conventions used when converting between java field names and database column names. */
public enum CaseFormat {
	SNAKE,  // my_field_name
	CAMEL,  // myFieldName
	PASCAL  // MyFieldName
}

View File

@@ -0,0 +1,11 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a String (or enum) field to be stored as a CLOB column rather than the
 * default VARCHAR(255) — see SchemaGenerator.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DBClob {
}

View File

@@ -0,0 +1,12 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a field to be excluded from database serialization.
 * NOTE(review): the {@code name} attribute appears unused for an "ignore" marker —
 * possibly copied from DBName; confirm before relying on it.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DBIgnore {
	String name() default "";
}

View File

@@ -0,0 +1,14 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.List;
/**
 * Describes a database index; referenced from {@code DBSerializable.indexes()}.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DBIndex {
	/** The columns the index covers, in order. */
	String[] columns();
	/** presumably selects a hashed index when true — TODO confirm against the proxy implementations */
	boolean hash() default false;
}

View File

@@ -0,0 +1,12 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Overrides the database column name for a field — presumably consulted by the
 * serializer machinery in place of the case-converted field name; confirm against
 * AbstractDaoSerializer.fieldToDatabaseName.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DBName {
	String name() default "";
}

View File

@@ -0,0 +1,16 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a class as database-serializable; classes bearing this annotation are picked
 * up by DaoSerializerGenerator / SchemaGenerator / SwiftModelGenerator.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface DBSerializable {
	/** Table/collection name override; empty means derive from the class name. */
	String name() default "";
	/** presumably the database sequence used for key generation — TODO confirm */
	String seq() default "";
	/** Case format of the database column names (default snake_case). */
	CaseFormat caseFormat() default CaseFormat.SNAKE;
	DBIndex[] indexes() default {};
	/** When false, DaoSerializerGenerator skips writing a serializer source file for this class. */
	boolean autogen() default true;
}

View File

@@ -0,0 +1,12 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Overrides the java type used when mapping a field to its database representation —
 * presumably consulted by the serializer machinery; confirm against AbstractDaoSerializer.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DBType {
	Class<?> type() default String.class;
}

View File

@@ -0,0 +1,11 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marker annotation on fields. Its consumer is not visible in this module —
 * presumably flags fields that must always be included; TODO confirm usage.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface Important {
}

View File

@@ -0,0 +1,11 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marker annotation on fields — presumably excludes the field from UPDATE
 * statements (insert-only); its consumer is not visible in this module, confirm.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface NeverUpdate {
}

View File

@@ -0,0 +1,11 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks the primary key field. SchemaGenerator emits a PRIMARY KEY constraint for it
 * (a composite constraint when several fields carry this annotation), and
 * DaoSerializerGenerator maps it to "_id" for Mongo proxies.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface PrimaryKey {
}

View File

@@ -0,0 +1,12 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Stores the class's Date fields as formatted strings; generated serializers format
 * and parse them with this pattern via DateUtils.
 *
 * NOTE(review): the default pattern uses 'hh' (12-hour clock, 01-12) with no AM/PM
 * marker, which is ambiguous — 'HH' was likely intended. Not changed here because
 * data already persisted with this default would no longer round-trip; confirm
 * before fixing.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface StringDates {
	String format() default "yyyy-MM-dd'T'hh:mm:ss.SSS";
}

View File

@@ -0,0 +1,11 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Stores the class's Date fields as native TIMESTAMP columns (SchemaGenerator) and
 * serializes them via DaoSerializer.toTimestamp, instead of the default epoch-millis
 * NUMBER(19,0) representation.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface TimestampDates {
}

View File

@@ -0,0 +1,11 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marker annotation on fields. Its consumer is not visible in this module —
 * presumably the counterpart of {@code Important}; TODO confirm usage.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface Unimportant {
}

View File

@@ -0,0 +1,137 @@
package com.lanternsoftware.util.dao.csv;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.ITransformer;
import com.lanternsoftware.util.dao.DaoEntity;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.IDaoSerializer;
/**
 * Streams a CSV document as an iterator of deserialized objects.  The first row is the
 * header row; each subsequent row is converted to a DaoEntity keyed by header name and
 * handed to the supplied serializer.  The underlying stream is closed when the iterator
 * is exhausted.
 *
 * NOTE(review): rows are tokenized with a simple comma split, so quoted fields
 * containing commas are not supported.
 */
public abstract class CSVStream {
	protected static final Logger LOG = LoggerFactory.getLogger(CSVStream.class);

	public static <T> Iterator<T> parse(InputStream _is, Class<T> _class) {
		return new CSVIterator<T>(_is, DaoSerializer.getSerializer(_class));
	}
	public static <T> Iterator<T> parse(InputStream _is, Class<T> _class, DaoEntity _entity) {
		return new CSVIterator<T>(_is, DaoSerializer.getSerializer(_class), _entity);
	}
	public static <T> Iterator<T> parse(InputStream _is, Class<T> _class, ITransformer<String, String> _headerTransformer) {
		return new CSVIterator<T>(_is, DaoSerializer.getSerializer(_class), _headerTransformer);
	}
	public static <T> Iterator<T> parse(InputStream _is, Class<T> _class, DaoEntity _entity, ITransformer<String, String> _headerTransformer) {
		return new CSVIterator<T>(_is, DaoSerializer.getSerializer(_class), _entity, _headerTransformer);
	}
	public static <T> Iterator<T> parse(InputStream _is, IDaoSerializer<T> _serializer) {
		return new CSVIterator<T>(_is, _serializer);
	}
	public static <T> Iterator<T> parse(InputStream _is, IDaoSerializer<T> _serializer, DaoEntity _entity) {
		return new CSVIterator<T>(_is, _serializer, _entity);
	}
	public static <T> Iterator<T> parse(InputStream _is, IDaoSerializer<T> _serializer, ITransformer<String, String> _headerTransformer) {
		return new CSVIterator<T>(_is, _serializer, _headerTransformer);
	}
	public static <T> Iterator<T> parse(InputStream _is, IDaoSerializer<T> _serializer, DaoEntity _entity, ITransformer<String, String> _headerTransformer) {
		return new CSVIterator<T>(_is, _serializer, _entity, _headerTransformer);
	}

	private static class CSVIterator<T> implements Iterator<T> {
		private final BufferedReader reader;
		private final String[] headers;
		private final IDaoSerializer<T> serializer;
		private final DaoEntity metadata;
		private String[] line = null;

		public CSVIterator(InputStream _is, IDaoSerializer<T> _serializer) {
			this(_is, _serializer, null, null);
		}
		public CSVIterator(InputStream _is, IDaoSerializer<T> _serializer, DaoEntity _metadata) {
			this(_is, _serializer, _metadata, null);
		}
		public CSVIterator(InputStream _is, IDaoSerializer<T> _serializer, ITransformer<String, String> _headerTransformer) {
			this(_is, _serializer, null, _headerTransformer);
		}
		public CSVIterator(InputStream _is, IDaoSerializer<T> _serializer, DaoEntity _metadata, ITransformer<String, String> _headerTransformer){
			// Use an explicit charset: the old code used the platform default, which made parsing machine-dependent.
			reader = new BufferedReader(new InputStreamReader(_is, StandardCharsets.UTF_8));
			headers = line();
			if ((_headerTransformer != null) && (headers != null)) {
				for (int i=0; i<headers.length; i++) {
					headers[i] = _headerTransformer.transform(headers[i]);
				}
			}
			line = line();
			serializer = _serializer;
			metadata = _metadata;
		}
		@Override
		public boolean hasNext() {
			// Exhaustion doubles as cleanup: the reader is closed once the last row has been consumed.
			if (line == null) {
				IOUtils.closeQuietly(reader);
				return false;
			}
			return true;
		}
		@Override
		public T next() {
			// Iterator contract: signal exhaustion instead of silently producing an entity of nulls.
			if (line == null)
				throw new NoSuchElementException();
			DaoEntity entity = new DaoEntity();
			// Seed every row with the caller-supplied constant metadata, if any.
			if(metadata!=null){
				Set<Map.Entry<String, Object>> entryset = metadata.entrySet();
				for (Map.Entry<String, Object> entry : entryset) {
					entity.put(entry.getKey(), entry.getValue());
				}
			}
			// Short rows yield nulls for the trailing columns (CollectionUtils.get is bounds-safe).
			for (int i = 0; i < headers.length; i++) {
				entity.put(headers[i], CollectionUtils.get(line, i));
			}
			T t;
			try {
				t = serializer.fromDaoEntity(entity);
			}
			catch (RuntimeException e) {
				// Advance past the bad row so the caller can keep iterating after catching.
				line = line();
				throw e;
			}
			line = line();
			return t;
		}
		@Override
		public void remove() {
			// Rows cannot be removed from the underlying stream; fail loudly instead of silently ignoring the call.
			throw new UnsupportedOperationException("CSV rows cannot be removed");
		}
		/** Reads and tokenizes the next row, or returns null at EOF or on a read error. */
		private String[] line() {
			try {
				String line = reader.readLine();
				if (line == null)
					return null;
				return line.split("\\s*,\\s*");
			} catch (IOException _e) {
				LOG.error("Failed to parse CSV", _e);
				return null;
			}
		}
	}
}

View File

@@ -0,0 +1,379 @@
package com.lanternsoftware.util.dao.generator;
import java.io.File;
import java.io.FileOutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.lanternsoftware.util.dao.AbstractDaoSerializer;
import com.lanternsoftware.util.dao.AnnotationFinder;
import com.lanternsoftware.util.dao.DaoEntity;
import com.lanternsoftware.util.dao.DaoProxyType;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.annotations.CaseFormat;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
import com.lanternsoftware.util.dao.annotations.PrimaryKey;
import com.lanternsoftware.util.dao.annotations.StringDates;
import com.lanternsoftware.util.dao.annotations.TimestampDates;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.DateUtils;
import com.lanternsoftware.util.ITransformer;
import com.lanternsoftware.util.NullUtils;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DaoSerializerGenerator {
private static final Logger LOG = LoggerFactory.getLogger(DaoSerializerGenerator.class);
/** Generates serializers for all {@code DBSerializable} classes under _codePath, primitives only, targeting the default (Mongo) proxy. */
public static void generateSerializers(String _codePath) {
	generateSerializers(_codePath, false, null);
}
/** Generates serializers and the META-INF/services SPI file for all {@code DBSerializable} classes under _codePath. */
public static void generateSerializers(String _codePath, boolean _serializeNestedObjects, List<DaoProxyType> _proxyTypes) {
	generateSerializers(_codePath, _serializeNestedObjects, _proxyTypes, true);
}
/**
 * Generates a serializer source file for every {@code DBSerializable} class found under
 * _codePath, and optionally a java.util.ServiceLoader SPI file listing the generated
 * serializers per source module.
 *
 * @param _codePath root of the source tree to scan
 * @param _serializeNestedObjects if true, generated serializers persist nested objects, not just primitives
 * @param _proxyTypes the proxy types the serializers should declare support for (defaults to Mongo when null/empty)
 * @param _generateSpiFile if true, writes META-INF/services/com.lanternsoftware.util.dao.IDaoSerializer per module
 */
public static void generateSerializers(String _codePath, boolean _serializeNestedObjects, List<DaoProxyType> _proxyTypes, boolean _generateSpiFile) {
	if (CollectionUtils.isEmpty(_proxyTypes))
		_proxyTypes = Collections.singletonList(DaoProxyType.MONGO);
	Map<String, List<String>> serializers = new HashMap<>();
	for (Map.Entry<String, String> e : AnnotationFinder.findAnnotatedClasses(_codePath, DBSerializable.class).entrySet()) {
		SerializerGenerationResult result = generateSerializer(e.getKey(), e.getValue() + File.separator + "dao" + File.separator, _serializeNestedObjects, null, _proxyTypes, null);
		if (result == null)
			continue;
		int idx = e.getValue().indexOf(File.separator + "src" + File.separator);
		// idx + 9 keeps the path up through ".../src/main" (NOTE(review): assumes a 1-char File.separator).
		if (idx > -1)
			CollectionUtils.addToMultiMap(e.getValue().substring(0, idx + 9) + File.separator + "resources" + File.separator + "META-INF" + File.separator + "services" + File.separator, result.getClassName(), serializers);
	}
	if (_generateSpiFile) {
		for (Entry<String, List<String>> entry : serializers.entrySet()) {
			new File(entry.getKey()).mkdirs();
			// try-with-resources replaces the manual IOUtils.closeQuietly/finally dance.
			try (FileOutputStream f = new FileOutputStream(entry.getKey() + "com.lanternsoftware.util.dao.IDaoSerializer")) {
				Collections.sort(entry.getValue());
				for (String className : entry.getValue()) {
					f.write(NullUtils.toByteArray(className));
					f.write(NullUtils.toByteArray("\r\n"));
				}
			}
			catch (Exception e) {
				LOG.error("Failed to create service loader file", e);
			}
		}
	}
}
/**
 * Generates a DaoSerializer implementation for a single annotated class and writes it
 * to disk (unless the class's {@code DBSerializable.autogen()} is false, in which case
 * the source is built but not written).
 *
 * @param _className
 *            The name of the class to be serialized
 * @param _outputPath
 *            The path to write the generated serializer to
 * @param _serializeNestedObjects
 *            if true, the serializer will save a hierarchical structure containing all sub objects. If false, it only serializes primitives
 * @param _fieldNameExceptions
 *            a mapping of standard names to actual names to handle special cases where existing objects aren't following proper naming conventions
 * @param _intendedProxyTypes
 *            a list of proxy types that can use this generated serializer (can pass null if it should be used for all proxies)
 * @param _primaryKey
 *            The database primary key field. For Mongo, this will be changed to "_id" in the serializer.
 * @return The class name of the generated serializer, or null if the class could not be loaded or the file could not be written
 */
public static SerializerGenerationResult generateSerializer(String _className, String _outputPath, boolean _serializeNestedObjects, Map<String, String> _fieldNameExceptions, List<DaoProxyType> _intendedProxyTypes, String _primaryKey) {
	try {
		Set<Class<?>> customSerializerFields = new HashSet<>();
		Class<?> clazz = Class.forName(_className);
		// Derive the serializer's package from the output path when it lives under a java
		// source root; otherwise default to a "dao" sub-package of the entity's package.
		String packagePath;
		int srcPos = _outputPath.indexOf(File.separator + "java" + File.separator);
		if (srcPos > -1) {
			packagePath = _outputPath.substring(srcPos + 6).replace(File.separator, ".");
			if (packagePath.endsWith("."))
				packagePath = packagePath.substring(0, packagePath.length() - 1);
		}
		else
			packagePath = clazz.getPackage().getName() + ".dao";
		StringBuilder serializer = new StringBuilder();
		serializer.append("package ");
		serializer.append(packagePath);
		serializer.append(";\n\n");
		// Collect the imports the generated source will need.
		Set<Class<?>> imports = CollectionUtils.asHashSet(AbstractDaoSerializer.class, DaoEntity.class, DaoSerializer.class, clazz);
		if (CollectionUtils.isNotEmpty(_intendedProxyTypes)) {
			imports.add(DaoProxyType.class);
			imports.add(List.class);
			if (_intendedProxyTypes.size() > 1)
				imports.add(Arrays.class);
			else
				imports.add(Collections.class);
		}
		Map<String, List<Field>> mapFields = new HashMap<>();
		List<Field> fields = DaoSerializer.getSerializableFields(clazz, _serializeNestedObjects);
		for (Field f : fields) {
			for (Annotation a : f.getAnnotations()) {
				CollectionUtils.addToMultiMap(a.annotationType().getCanonicalName(), f, mapFields);
			}
			Class<?> type = AbstractDaoSerializer.getType(f);
			if (Collection.class.isAssignableFrom(type)) {
				Class<?> elementClass = ((Class<?>) (((ParameterizedType) f.getGenericType()).getActualTypeArguments()[0]));
				imports.add(elementClass);
			}
			else if (type.isEnum() || AbstractDaoSerializer.requiresCustomSerializer(f))
				imports.add(type);
		}
		CaseFormat caseFormat = CaseFormat.SNAKE;
		DBSerializable dbSerializable = DaoSerializer.getAnnotation(clazz, DBSerializable.class);
		if ((dbSerializable != null) && (dbSerializable.caseFormat() != null))
			caseFormat = dbSerializable.caseFormat();
		StringDates dateFormat = DaoSerializer.getAnnotation(clazz, StringDates.class);
		if (dateFormat != null)
			imports.add(DateUtils.class);
		Field primaryKey = CollectionUtils.getFirst(mapFields.get(PrimaryKey.class.getCanonicalName()));
		if (primaryKey != null) {
			if (primaryKey.getType() != String.class)
				imports.add(NullUtils.class);
			if (NullUtils.isEmpty(_primaryKey))
				_primaryKey = AbstractDaoSerializer.fieldToDatabaseName(primaryKey, caseFormat);
		}
		List<String> imp = CollectionUtils.transform(imports, new ITransformer<Class<?>, String>() {
			@Override
			public String transform(Class<?> _class) {
				return "import " + _class.getCanonicalName() + ";\n";
			}
		});
		Collections.sort(imp);
		for (String i : imp) {
			serializer.append(i);
		}
		serializer.append("\npublic class ");
		serializer.append(clazz.getSimpleName());
		serializer.append("Serializer extends AbstractDaoSerializer<");
		serializer.append(clazz.getSimpleName());
		serializer.append(">\n{\n");
		if (dateFormat != null) {
			serializer.append("\tprivate static final String FORMAT = \"");
			serializer.append(dateFormat.format());
			serializer.append("\";\n\n");
		}
		serializer.append("\t@Override\n\tpublic Class<");
		serializer.append(clazz.getSimpleName());
		serializer.append("> getSupportedClass()\n\t{\n\t\treturn ");
		serializer.append(clazz.getSimpleName());
		serializer.append(".class;\n\t}\n\n");
		// BUGFIX: previously getFirst(...).name() ran unconditionally, throwing an NPE when
		// _intendedProxyTypes was null/empty despite the javadoc allowing null.
		String intendedType = CollectionUtils.isEmpty(_intendedProxyTypes) ? null : "DaoProxyType." + CollectionUtils.getFirst(_intendedProxyTypes).name();
		if (CollectionUtils.isNotEmpty(_intendedProxyTypes)) {
			serializer.append("\t@Override\n\tpublic List<DaoProxyType> getSupportedProxies() {\n\t\t");
			if (_intendedProxyTypes.size() > 1) {
				serializer.append("return Arrays.asList(");
				serializer.append(CollectionUtils.transformToCommaSeparated(_intendedProxyTypes, new ITransformer<DaoProxyType, String>() {
					@Override
					public String transform(DaoProxyType _daoProxyType) {
						return "DaoProxyType." + _daoProxyType.name();
					}
				}));
				serializer.append(");\n\t}\n");
			}
			else {
				serializer.append("return Collections.singletonList(");
				serializer.append(intendedType);
				serializer.append(");\n\t}\n\n");
			}
		}
		// Build toDaoEntity(...) into "serializer" and fromDaoEntity(...) into "from" in one pass over the fields.
		serializer.append("\t@Override\n\tpublic DaoEntity toDaoEntity(");
		serializer.append(clazz.getSimpleName());
		serializer.append(" _o)\n\t{\n\t\tDaoEntity d = new DaoEntity(");
		serializer.append(");\n");
		StringBuilder from = new StringBuilder();
		from.append("\t@Override\n\tpublic ");
		from.append(clazz.getSimpleName());
		from.append(" fromDaoEntity(DaoEntity _d)\n\t{\n\t\t");
		from.append(clazz.getSimpleName());
		from.append(" o = new ");
		from.append(clazz.getSimpleName());
		from.append("();\n");
		for (Field f : fields) {
			String databaseField = AbstractDaoSerializer.fieldToDatabaseName(f, caseFormat);
			boolean customSerializer = AbstractDaoSerializer.requiresCustomSerializer(f);
			Class<?> type = AbstractDaoSerializer.getType(f);
			String classField = AbstractDaoSerializer.fieldToGetterName(f);
			if (_fieldNameExceptions != null) {
				String override = _fieldNameExceptions.get(classField);
				if (NullUtils.isNotEmpty(override))
					classField = override;
			}
			Class<?> collType = null;
			if (Collection.class.isAssignableFrom(type))
				collType = ((Class<?>) (((ParameterizedType) f.getGenericType()).getActualTypeArguments()[0]));
			// BUGFIX: null-guard added; .contains on a null list previously threw an NPE.
			boolean mongoPrimaryKey = NullUtils.isEqual(databaseField, _primaryKey) && (_intendedProxyTypes != null) && _intendedProxyTypes.contains(DaoProxyType.MONGO);
			if (mongoPrimaryKey) {
				// Mongo primary keys are stored under "_id" and only written when non-null.
				databaseField = "_id";
				serializer.append("\t\tif (_o.get");
				serializer.append(classField);
				serializer.append("() != null)");
				serializer.append("\n\t\t\td.put(\"");
				serializer.append(databaseField);
				serializer.append("\", _o.get");
				serializer.append(classField);
				from.append("\t\to.set");
				from.append(classField);
				serializer.append("());\n");
				from.append("(DaoSerializer.getString(_d, \"");
				from.append(databaseField);
				from.append("\"));\n");
			}
			else {
				serializer.append("\t\td.put(\"");
				serializer.append(databaseField);
				if (customSerializer) {
					if (Collection.class.isAssignableFrom(type)) {
						serializer.append("\", DaoSerializer.toDaoEntities(_o.get");
						serializer.append(classField);
						customSerializerFields.add(collType);
					}
					else {
						serializer.append("\", DaoSerializer.toDaoEntity(_o.get");
						customSerializerFields.add(type);
						serializer.append(classField);
					}
					if (NullUtils.isEmpty(intendedType))
						serializer.append("()));\n");
					else {
						serializer.append("(), ");
						serializer.append(intendedType);
						serializer.append("));\n");
					}
				}
				else if (type.equals(Date.class)) {
					if (dateFormat != null) {
						serializer.append("\", DateUtils.format(FORMAT, _o.get");
					}
					else if (DaoSerializer.isAnnotationPresent(clazz, TimestampDates.class))
						serializer.append("\", DaoSerializer.toTimestamp(_o.get");
					else
						serializer.append("\", DaoSerializer.toLong(_o.get");
					serializer.append(classField);
					serializer.append("()));\n");
				}
				else if (type.isEnum()) {
					serializer.append("\", DaoSerializer.toEnumName(_o.get");
					serializer.append(classField);
					serializer.append("()));\n");
				}
				else {
					// booleans use the is-getter convention
					if (type.getName().equals("boolean")) {
						serializer.append("\", _o.is");
					}
					else {
						serializer.append("\", _o.get");
					}
					serializer.append(classField);
					serializer.append("());\n");
				}
				from.append("\t\to.set");
				from.append(classField);
				from.append("(DaoSerializer.get");
				if (type.getName().equals("int")) {
					from.append("Integer");
				}
				else if (type.equals(Date.class)) {
					from.append("Date");
				}
				else if (type.equals(String.class)) {
					from.append("String");
				}
				else if (type.equals(BigDecimal.class)) {
					from.append("BigDecimal");
				}
				else if (type.equals(byte[].class)) {
					from.append("ByteArray");
				}
				else if (type.isEnum()) {
					from.append("Enum");
				}
				else if (customSerializer) {
					if (Collection.class.isAssignableFrom(type))
						from.append("List");
					else
						from.append("Object");
				}
				else {
					// fall back to the capitalized simple type name (Long, Double, Boolean, ...)
					from.append(type.getSimpleName().substring(0, 1).toUpperCase());
					from.append(type.getSimpleName().substring(1));
				}
				from.append("(_d, \"");
				from.append(databaseField);
				from.append("\"");
				if (type.equals(Date.class) && (dateFormat != null))
					from.append(", FORMAT");
				else if (Collection.class.isAssignableFrom(type)) {
					from.append(", ");
					from.append(collType.getSimpleName());
					from.append(".class");
				}
				else if (type.isEnum() || customSerializer) {
					from.append(", ");
					from.append(type.getSimpleName());
					from.append(".class");
				}
				from.append("));\n");
			}
		}
		serializer.append("\t\treturn d;\n\t}\n\n");
		serializer.append(from.toString());
		serializer.append("\t\treturn o;\n\t}\n}");
		FileOutputStream f = null;
		try {
			if ((dbSerializable == null) || dbSerializable.autogen()) {
				new File(_outputPath).mkdirs();
				f = new FileOutputStream(_outputPath + clazz.getSimpleName() + "Serializer.java");
				f.write(NullUtils.toByteArray(serializer.toString()));
			}
			return new SerializerGenerationResult(packagePath + "." + clazz.getSimpleName() + "Serializer", customSerializerFields);
		}
		catch (Exception e) {
			LOG.error("Failed to write serializer", e);
			return null;
		}
		finally {
			IOUtils.closeQuietly(f);
		}
	}
	catch (ClassNotFoundException e) {
		return null;
	}
}
/**
 * Returns true if _class (or any supertype) implements an interface with the given
 * simple name, searching super-interfaces recursively.
 * NOTE(review): private and currently uncalled within this class.
 */
private static boolean doesImplement(Class<?> _class, String _interfaceName) {
	for (Class<?> c = _class; (c != null) && !c.equals(Object.class); c = c.getSuperclass()) {
		for (Class<?> intf : c.getInterfaces()) {
			if (intf.getSimpleName().equals(_interfaceName) || doesImplement(intf, _interfaceName))
				return true;
		}
	}
	return false;
}
}

View File

@@ -0,0 +1,91 @@
package com.lanternsoftware.util.dao.generator;
import java.lang.reflect.Field;
import java.util.Date;
import java.util.List;
import java.util.Map;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.dao.AbstractDaoSerializer;
import com.lanternsoftware.util.dao.AnnotationFinder;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.annotations.DBClob;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
import com.lanternsoftware.util.dao.annotations.PrimaryKey;
import com.lanternsoftware.util.dao.annotations.TimestampDates;
/**
 * Emits Oracle-style CREATE TABLE statements (to stdout) for every
 * {@code DBSerializable} class found under a source tree.
 */
public class SchemaGenerator {
	/** Prints a CREATE TABLE statement for each annotated class under _sourceCodeFolder. */
	public static void generateSchema(String _sourceCodeFolder) {
		Map<String, String> classes = AnnotationFinder.findAnnotatedClasses(_sourceCodeFolder, DBSerializable.class);
		for (String className : classes.keySet()) {
			try {
				Class<?> clazz = Class.forName(className);
				if ((clazz == null) || !DaoSerializer.isAnnotationPresent(clazz, DBSerializable.class))
					continue;
				System.out.println(generateTableCreateStatement(clazz));
			}
			catch (ClassNotFoundException _e) {
				// Class was found in source but is not on the classpath; skip it.
			}
		}
	}

	/**
	 * Builds the CREATE TABLE statement for one entity class.  Fields with unmapped
	 * types are silently omitted from the schema.
	 */
	public static String generateTableCreateStatement(Class<?> _entity) {
		StringBuilder sql = new StringBuilder("CREATE TABLE ");
		boolean timestampDates = _entity.getAnnotation(TimestampDates.class) != null;
		String tableName = DaoSerializer.getTableName(_entity);
		sql.append(tableName);
		sql.append(" (");
		boolean bFirst = true;
		List<String> keys = DaoSerializer.getFieldsByAnnotation(_entity, PrimaryKey.class);
		for (Field f : DaoSerializer.getSerializableFields(_entity)) {
			String name = AbstractDaoSerializer.fieldToDatabaseName(f);
			if (name == null)
				continue;
			StringBuilder col = new StringBuilder(name);
			col.append(" ");
			// BUGFIX: was (Byte.TYPE, byte.class) — byte.class IS Byte.TYPE, so boxed Byte
			// fields matched nothing and were silently dropped from the schema.
			if (NullUtils.isOneOf(f.getType(), Byte.TYPE, Byte.class))
				col.append("NUMBER(3,0)");
			else if (NullUtils.isOneOf(f.getType(), Short.TYPE, Short.class))
				col.append("NUMBER(5,0)");
			else if (NullUtils.isOneOf(f.getType(), Integer.TYPE, Integer.class))
				col.append("NUMBER(10,0)");
			else if (NullUtils.isOneOf(f.getType(), Long.TYPE, Long.class))
				col.append("NUMBER(19,0)");
			else if (NullUtils.isOneOf(f.getType(), Double.TYPE, Double.class, Float.TYPE, Float.class))
				col.append("NUMBER(19,4)");
			else if (NullUtils.isOneOf(f.getType(), Boolean.TYPE, Boolean.class))
				col.append("NUMBER(1,0)");
			else if (f.getType().equals(String.class) || f.getType().isEnum()) {
				if (f.getAnnotation(DBClob.class) != null)
					col.append("CLOB");
				else
					col.append("VARCHAR(255)");
			}
			else if (f.getType().equals(Date.class)) {
				// Dates default to epoch millis unless the class opts into native timestamps.
				if (timestampDates)
					col.append("TIMESTAMP");
				else
					col.append("NUMBER(19,0)");
			}
			else
				continue;
			// A single-column key is declared inline; composite keys get a named constraint below.
			if ((f.getAnnotation(PrimaryKey.class) != null) && (keys.size() == 1))
				col.append(" PRIMARY KEY");
			if (!bFirst)
				sql.append(",");
			else
				bFirst = false;
			sql.append(col);
		}
		if (keys.size() > 1) {
			sql.append(", CONSTRAINT ");
			sql.append(tableName);
			sql.append("_pk PRIMARY KEY (");
			sql.append(CollectionUtils.commaSeparated(keys));
			sql.append(")");
		}
		sql.append(");");
		return sql.toString();
	}
}

View File

@@ -0,0 +1,21 @@
package com.lanternsoftware.util.dao.generator;
import java.util.Set;
/**
 * The outcome of generating a single serializer: the generated class's name plus any
 * field types encountered that still need their own custom serializers.
 */
public class SerializerGenerationResult {
	private final String className;
	private final Set<Class<?>> fieldsNeedingCustomSerializers;
	public SerializerGenerationResult(String _className, Set<Class<?>> _fieldsNeedingCustomSerializers) {
		className = _className;
		fieldsNeedingCustomSerializers = _fieldsNeedingCustomSerializers;
	}
	/** The generated serializer's class name (fully qualified by DaoSerializerGenerator; the .swift file name for SwiftModelGenerator). */
	public String getClassName() {
		return className;
	}
	/** Field types that require hand-written or additional generated serializers. */
	public Set<Class<?>> getFieldsNeedingCustomSerializers() {
		return fieldsNeedingCustomSerializers;
	}
}

View File

@@ -0,0 +1,200 @@
package com.lanternsoftware.util.dao.generator;
import java.io.File;
import java.io.FileOutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.lanternsoftware.util.dao.DaoProxyType;
import com.lanternsoftware.util.dao.annotations.PrimaryKey;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.dao.AbstractDaoSerializer;
import com.lanternsoftware.util.dao.AnnotationFinder;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.annotations.CaseFormat;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
public class SwiftModelGenerator {
private static final Logger LOG = LoggerFactory.getLogger(SwiftModelGenerator.class);
/**
 * Generates a Swift model file for every {@code DBSerializable} class under _codePath,
 * mirroring the source tree under _outputPath with a "bson" sub-directory per package.
 */
public static void generateModel(String _codePath, String _outputPath) {
	for (Entry<String, String> e : AnnotationFinder.findAnnotatedClasses(_codePath, DBSerializable.class).entrySet()) {
		generateSerializer(e.getKey(), e.getValue().replace(_codePath, _outputPath) + File.separator + "bson" + File.separator);
	}
}
/**
 * Generates one Swift model class (stored properties, a BSON Document initializer, and
 * a toBSON() method) for the given annotated java class and writes it to _outputPath.
 *
 * NOTE(review): customSerializerFields is never populated in this method, so the
 * returned result always reports an empty set.
 *
 * @return the generation result, or null if the class cannot be loaded, is not
 *         annotated, or the file cannot be written
 */
private static SerializerGenerationResult generateSerializer(String _className, String _outputPath) {
	try {
		Set<Class<?>> customSerializerFields = new HashSet<>();
		Class<?> clazz = Class.forName(_className);
		DBSerializable dbSerializable = DaoSerializer.getAnnotation(clazz, DBSerializable.class);
		if (dbSerializable == null)
			return null;
		CaseFormat caseFormat = dbSerializable.caseFormat();
		StringBuilder bson = new StringBuilder();
		bson.append("import Foundation\nimport BSON\n\nclass ");
		bson.append(clazz.getSimpleName());
		bson.append(":LanternObject {\n");
		List<Field> fields = DaoSerializer.getSerializableFields(clazz, true);
		Map<String, List<Field>> mapFields = new HashMap<>();
		for (Field f : fields) {
			if (Modifier.isStatic(f.getModifiers()) || Modifier.isTransient(f.getModifiers()))
				continue;
			for (Annotation a : f.getAnnotations()) {
				CollectionUtils.addToMultiMap(a.annotationType().getCanonicalName(), f, mapFields);
			}
		}
		// The primary key's column is renamed to Mongo's "_id" in the generated Swift.
		String primaryKeyName = null;
		Field primaryKey = CollectionUtils.getFirst(mapFields.get(PrimaryKey.class.getCanonicalName()));
		if (primaryKey != null) {
			primaryKeyName = AbstractDaoSerializer.fieldToDatabaseName(primaryKey, caseFormat);
		}
		// Stored-property declarations: var name:Type?
		for (Field f : fields) {
			bson.append("\tvar ");
			bson.append(f.getName());
			bson.append(":");
			bson.append(typeToSwift(f));
			bson.append("?\n");
		}
		// required init(bson: Document)
		bson.append("\n\tinit() {}\n\n\trequired init(bson: Document) {\n");
		for (Field f : fields) {
			bson.append("\t\tself.");
			bson.append(f.getName());
			String databaseName = AbstractDaoSerializer.fieldToDatabaseName(f, caseFormat);
			if (NullUtils.isEqual(databaseName, primaryKeyName))
				databaseName = "_id";
			if (AbstractDaoSerializer.getCollectionType(f) != null) {
				bson.append(" = BsonUtils.getList(bson:bson, field:\"");
				bson.append(databaseName);
				bson.append("\")\n");
			}
			else if (AbstractDaoSerializer.requiresCustomSerializer(f)) {
				bson.append(" = BsonUtils.getObject(bson:bson, field:\"");
				bson.append(databaseName);
				bson.append("\")\n");
			}
			else {
				bson.append(" = bson[\"");
				bson.append(databaseName);
				bson.append("\"] as? ");
				bson.append(typeToSwift(f));
				bson.append("\n");
			}
		}
		// func toBSON()->Document
		bson.append("\t}\n\n\tfunc toBSON()->Document {\n\t\tlet bson: Document = [");
		boolean first = true;
		for (Field f : fields) {
			if (!first)
				bson.append(",");
			else
				first = false;
			String databaseName = AbstractDaoSerializer.fieldToDatabaseName(f, caseFormat);
			if (NullUtils.isEqual(databaseName, primaryKeyName))
				databaseName = "_id";
			bson.append("\n\t\t\t\"");
			bson.append(databaseName);
			bson.append("\": ");
			if (AbstractDaoSerializer.getCollectionType(f) != null) {
				bson.append("BsonUtils.toDocument(coll:self.");
				bson.append(f.getName());
				bson.append(")");
			}
			else if (AbstractDaoSerializer.requiresCustomSerializer(f)) {
				bson.append("BsonUtils.toDocument(obj:self.");
				bson.append(f.getName());
				bson.append(")");
			}
			else {
				bson.append("self.");
				bson.append(f.getName());
			}
		}
		bson.append("\n\t\t]\n\t\treturn bson\n\t}\n}");
		FileOutputStream f = null;
		try {
//			if (dbSerializable.autogen()) {
			new File(_outputPath).mkdirs();
			f = new FileOutputStream(_outputPath + clazz.getSimpleName() + ".swift");
			f.write(NullUtils.toByteArray(bson.toString()));
//			}
			return new SerializerGenerationResult(clazz.getSimpleName() + ".swift", customSerializerFields);
		}
		catch (Exception e) {
			// BUGFIX: was e.printStackTrace(); use the class's SLF4J logger like the rest of the module.
			LOG.error("Failed to write swift model for " + _className, e);
			return null;
		}
		finally {
			IOUtils.closeQuietly(f);
		}
	}
	catch (ClassNotFoundException e) {
		return null;
	}
}
/**
 * Maps a Java field to the Swift type name used in generated code.
 * Collection-typed fields become Swift arrays of the element type reported
 * by AbstractDaoSerializer; everything else defers to the class mapping.
 */
private static String typeToSwift(Field _f) {
	Class<?> fieldType = _f.getType();
	if (!Collection.class.isAssignableFrom(fieldType))
		return typeToSwift(fieldType);
	// Wrap the element type in Swift array syntax.
	return "[" + typeToSwift(AbstractDaoSerializer.getCollectionType(_f)) + "]";
}
/**
 * Maps a Java class to the equivalent Swift type name for generated code.
 * Primitives and their boxed wrappers map to fixed-width Swift numeric types,
 * enums are represented by name as String, and any unrecognized type falls
 * back to its Java simple name (presumably matching a generated Swift class —
 * TODO confirm against the generator's output set).
 */
private static String typeToSwift(Class<?> _class) {
	if (NullUtils.isOneOf(_class, Integer.TYPE, Integer.class))
		return "Int32";
	if (NullUtils.isOneOf(_class, Long.TYPE, Long.class))
		return "Int64";
	if (NullUtils.isOneOf(_class, Short.TYPE, Short.class))
		return "Int16";
	if (NullUtils.isOneOf(_class, Byte.TYPE, Byte.class))
		return "Int8";
	if (NullUtils.isOneOf(_class, Double.TYPE, Double.class))
		return "Double";
	if (NullUtils.isOneOf(_class, Float.TYPE, Float.class))
		return "Float";
	if (NullUtils.isOneOf(_class, Boolean.TYPE, Boolean.class))
		return "Bool";
	if (_class.equals(Date.class))
		return "Date";
	if (_class.equals(String.class))
		return "String";
	if (_class.equals(byte[].class))
		return "[UInt8]"; // fixed: Swift's unsigned byte type is UInt8, not "Uint8"
	if (_class.isEnum())
		return "String"; // enums are serialized by name
	return _class.getSimpleName();
}
/**
 * Recursively determines whether _class, any of its superclasses, or any
 * interface in that hierarchy implements an interface whose simple name
 * matches _interfaceName. Matching is by simple name only, so interfaces
 * with the same name in different packages are indistinguishable here.
 */
private static boolean doesImplement(Class<?> _class, String _interfaceName) {
	// Recursion terminates at Object (or a null superclass/interface).
	if ((_class == null) || Object.class.equals(_class))
		return false;
	for (Class<?> implemented : _class.getInterfaces()) {
		// Direct match, or a match somewhere up the interface's own hierarchy.
		if (implemented.getSimpleName().equals(_interfaceName) || doesImplement(implemented, _interfaceName))
			return true;
	}
	return doesImplement(_class.getSuperclass(), _interfaceName);
}
}

View File

@@ -0,0 +1,79 @@
package com.lanternsoftware.util.dao.jdbc;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import javax.naming.InitialContext;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.NullUtils;
/**
 * AbstractJdbcProxy implementation backed by a {@link DataSource} (typically a
 * container-managed connection pool looked up via JNDI). A fresh connection is
 * obtained for each call and must be closed by the caller (alwaysClose()
 * returns true).
 */
public class DataSourceProxy extends AbstractJdbcProxy {
	private static final Logger LOG = LoggerFactory.getLogger(DataSourceProxy.class);
	private DataSource dataSource;
	private String schemaName;

	/**
	 * Looks up the data source via JNDI and records the schema to apply to every
	 * connection handed out.
	 *
	 * @param _jndiDataSourceName the JNDI name of the data source
	 * @param _schemaName         the schema selected on each new connection
	 */
	public DataSourceProxy(String _jndiDataSourceName, String _schemaName) {
		this(_jndiDataSourceName);
		schemaName = _schemaName;
	}

	/**
	 * Looks up the data source via JNDI: first by the exact name given, then under
	 * the standard "java:/comp/env/" web-container prefix.
	 *
	 * @param _jndiDataSourceName the JNDI name of the data source
	 */
	public DataSourceProxy(String _jndiDataSourceName) {
		try {
			dataSource = (DataSource)new InitialContext().lookup(_jndiDataSourceName);
		}
		catch (Exception e) {
			try {
				dataSource = (DataSource)new InitialContext().lookup("java:/comp/env/"+_jndiDataSourceName);
			}
			catch (Exception _e) {
				// Log both lookup failures so neither cause is lost
				// (the original code logged only the first exception).
				LOG.error("Error looking up " + _jndiDataSourceName, e);
				LOG.error("Error looking up java:/comp/env/" + _jndiDataSourceName, _e);
			}
		}
	}

	/**
	 * Wraps an existing data source and probes its metadata to detect Oracle 12c
	 * or newer, switching the proxy's database type accordingly.
	 *
	 * @param _dataSource the data source to wrap
	 */
	public DataSourceProxy(DataSource _dataSource) {
		dataSource = _dataSource;
		// try-with-resources: the probe connection was previously leaked.
		try (Connection conn = getConnection()) {
			if (conn != null) {
				DatabaseMetaData metaData = conn.getMetaData();
				if (metaData.getDatabaseProductName().equals("Oracle") && (metaData.getDatabaseMajorVersion() >= 12))
					databaseType = DatabaseType.ORACLE_12C;
			}
		}
		catch (SQLException _e) {
			LOG.error("Could not get database type", _e);
		}
	}

	/**
	 * @return a new connection from the underlying data source with the configured
	 *         schema applied, or null if one could not be obtained
	 */
	@Override
	public Connection getConnection() {
		try {
			Connection conn = dataSource.getConnection();
			if (NullUtils.isNotEmpty(schemaName) && (conn != null))
				conn.setSchema(schemaName);
			return conn;
		}
		catch (SQLException _e) {
			LOG.error("Failed to get a jdbc connection", _e);
			return null;
		}
	}

	/**
	 * @return true if a connection can be obtained and validates within 10 seconds
	 */
	@Override
	public boolean isConnected() {
		// try-with-resources closes the test connection instead of leaking it,
		// and the null check guards against getConnection() returning null.
		try (Connection conn = getConnection()) {
			return (conn != null) && conn.isValid(10);
		}
		catch (Exception _e) {
			LOG.error("Failed to get a jdbc connection", _e);
			return false;
		}
	}

	/**
	 * @return true: connections from the data source must always be closed
	 *         (returned) after use
	 */
	@Override
	public boolean alwaysClose() {
		return true;
	}
}

View File

@@ -0,0 +1,8 @@
package com.lanternsoftware.util.dao.jdbc;
/**
 * The database flavors supported by the jdbc proxy layer.
 */
public enum DatabaseType {
	/** Oracle releases prior to 12c. */
	ORACLE_11G,
	/** Oracle 12c or newer (selected by DataSourceProxy when the driver reports major version &gt;= 12). */
	ORACLE_12C,
	/** MySQL (com.mysql.cj.jdbc driver). */
	MYSQL,
	/** InterSystems Cache (com.intersys.jdbc driver). */
	CACHE
}

View File

@@ -0,0 +1,91 @@
package com.lanternsoftware.util.dao.jdbc;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
@DBSerializable
/**
 * Serializable connection settings for a jdbc database: type, credentials,
 * host, database/service name, and port. {@link #getConnectionString()} builds
 * the driver-specific JDBC URL.
 */
public class JdbcConfig {
	private DatabaseType type;
	private String username;
	private String password;
	private String hostname;
	private String database;
	private String port;

	/** No-arg constructor for serialization. */
	public JdbcConfig() {
	}

	/**
	 * Full constructor.
	 *
	 * @param _type     the database flavor
	 * @param _username the login user
	 * @param _password the login password
	 * @param _hostname the database host
	 * @param _database the database/service name (may be empty)
	 * @param _port     the listener port
	 */
	public JdbcConfig(DatabaseType _type, String _username, String _password, String _hostname, String _database, String _port) {
		type = _type;
		username = _username;
		password = _password;
		hostname = _hostname;
		database = _database;
		port = _port;
	}

	public DatabaseType getType() {
		return type;
	}

	public void setType(DatabaseType _type) {
		type = _type;
	}

	public String getUsername() {
		return username;
	}

	public void setUsername(String _username) {
		username = _username;
	}

	public String getPassword() {
		return password;
	}

	public void setPassword(String _password) {
		password = _password;
	}

	public String getHostname() {
		return hostname;
	}

	public void setHostname(String _hostname) {
		hostname = _hostname;
	}

	public String getDatabase() {
		return database;
	}

	public void setDatabase(String _database) {
		database = _database;
	}

	public String getPort() {
		return port;
	}

	public void setPort(String _port) {
		port = _port;
	}

	/**
	 * Builds the JDBC connection URL for the configured type:
	 * MYSQL  - jdbc:mysql://host:port[/database]
	 * CACHE  - jdbc:Cache://host:port[/namespace]
	 * Oracle - jdbc:oracle:thin:@//host:port[/service]
	 *
	 * @return the JDBC URL
	 */
	public String getConnectionString() {
		StringBuilder conn = new StringBuilder("jdbc:");
		if (type == DatabaseType.MYSQL)
			conn.append("mysql://");
		else if (type == DatabaseType.CACHE)
			conn.append("Cache://"); // fixed: CACHE previously produced an Oracle URL
		else
			conn.append("oracle:thin:@//"); // fixed: the thin driver requires ':@' before //host
		conn.append(hostname);
		conn.append(":");
		conn.append(port);
		if (NullUtils.isNotEmpty(database)) {
			conn.append("/");
			conn.append(database);
		}
		return conn.toString();
	}
}

View File

@@ -0,0 +1,62 @@
package com.lanternsoftware.util.dao.jdbc;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * AbstractJdbcProxy implementation that wraps a single, externally supplied
 * JDBC Connection. The same connection is returned on every call and is never
 * auto-closed (alwaysClose() is false), so the owner is responsible for
 * closing it.
 */
public class JdbcProxy extends AbstractJdbcProxy {
	private static final Logger LOG = LoggerFactory.getLogger(JdbcProxy.class);
	private Connection connection;

	public JdbcProxy(Connection _connection) {
		connection = _connection;
	}

	/** @return the wrapped connection */
	@Override
	public Connection getConnection() {
		return connection;
	}

	/** @return true if the wrapped connection validates within 10 seconds */
	@Override
	public boolean isConnected() {
		try {
			return connection.isValid(10);
		}
		catch (Exception _e) {
			LOG.error("Failed to get a jdbc connection", _e);
			return false;
		}
	}

	/** @return false: the single wrapped connection is reused across calls */
	@Override
	public boolean alwaysClose() {
		return false;
	}

	/**
	 * Opens a connection described by the given config and wraps it in a proxy.
	 *
	 * @param _config the connection settings
	 * @return the proxy, or null if the driver or connection could not be created
	 */
	public static JdbcProxy getProxy(JdbcConfig _config) {
		return getProxy(_config.getType(), _config.getConnectionString(), _config.getUsername(), _config.getPassword());
	}

	/**
	 * Loads the driver for the given database type, opens a connection, and wraps
	 * it in a proxy.
	 *
	 * @param _type             the database flavor (null/unknown defaults to Oracle)
	 * @param _connectionString the JDBC URL
	 * @param _username         the login user
	 * @param _password         the login password
	 * @return the proxy, or null if the driver or connection could not be created
	 */
	public static JdbcProxy getProxy(DatabaseType _type, String _connectionString, String _username, String _password) {
		// Oracle is the fallback for any type without an explicit driver mapping.
		String driver = "oracle.jdbc.driver.OracleDriver";
		if (_type == DatabaseType.MYSQL)
			driver = "com.mysql.cj.jdbc.Driver";
		else if (_type == DatabaseType.CACHE)
			driver = "com.intersys.jdbc.CacheDriver";
		try {
			Driver driverInstance = Class.forName(driver).asSubclass(Driver.class).newInstance();
			DriverManager.registerDriver(driverInstance);
			JdbcProxy proxy = new JdbcProxy(DriverManager.getConnection(_connectionString, _username, _password));
			proxy.databaseType = _type;
			return proxy;
		}
		catch (Exception _e) {
			LOG.error("Failed to load JDBC driver for database type: " + _type, _e);
			return null;
		}
	}
}

View File

@@ -0,0 +1,7 @@
package com.lanternsoftware.util.dao.jdbc;
/**
 * Convenience factory for Oracle 11g {@link JdbcProxy} instances.
 */
public abstract class OracleTestProxy {
	/**
	 * Builds a proxy against an Oracle 11g database.
	 *
	 * @param _connectionString the JDBC URL
	 * @param _username         the login user
	 * @param _password         the login password
	 * @return the proxy, or null if the connection could not be created
	 */
	public static JdbcProxy getProxy(String _connectionString, String _username, String _password) {
		return JdbcProxy.getProxy(DatabaseType.ORACLE_11G, _connectionString, _username, _password);
	}
}

View File

@@ -0,0 +1,51 @@
package com.lanternsoftware.util.dao.jdbc.dao;
import com.lanternsoftware.util.dao.AbstractDaoSerializer;
import com.lanternsoftware.util.dao.DaoEntity;
import com.lanternsoftware.util.dao.DaoProxyType;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.jdbc.DatabaseType;
import com.lanternsoftware.util.dao.jdbc.JdbcConfig;
import java.util.Collections;
import java.util.List;
/**
 * DaoSerializer for {@link JdbcConfig}, used when persisting jdbc connection
 * settings via the MONGO proxy.
 */
public class JdbcConfigSerializer extends AbstractDaoSerializer<JdbcConfig> {
	@Override
	public Class<JdbcConfig> getSupportedClass() {
		return JdbcConfig.class;
	}

	@Override
	public List<DaoProxyType> getSupportedProxies() {
		return Collections.singletonList(DaoProxyType.MONGO);
	}

	@Override
	public DaoEntity toDaoEntity(JdbcConfig _o) {
		DaoEntity entity = new DaoEntity();
		entity.put("type", DaoSerializer.toEnumName(_o.getType()));
		entity.put("username", _o.getUsername());
		entity.put("password", _o.getPassword());
		entity.put("hostname", _o.getHostname());
		entity.put("database", _o.getDatabase());
		entity.put("port", _o.getPort());
		return entity;
	}

	@Override
	public JdbcConfig fromDaoEntity(DaoEntity _d) {
		JdbcConfig config = new JdbcConfig();
		config.setType(DaoSerializer.getEnum(_d, "type", DatabaseType.class));
		config.setUsername(DaoSerializer.getString(_d, "username"));
		config.setPassword(DaoSerializer.getString(_d, "password"));
		config.setHostname(DaoSerializer.getString(_d, "hostname"));
		config.setDatabase(DaoSerializer.getString(_d, "database"));
		config.setPort(DaoSerializer.getString(_d, "port"));
		return config;
	}
}

View File

@@ -0,0 +1,89 @@
package com.lanternsoftware.util.dao.jdbc.preparedinstatement;
import java.util.Collection;
import java.util.Iterator;
import java.util.TreeSet;
/**
 * Utility class that houses the logic to determine bucket sizes for batched
 * in-clauses. Rounding parameter counts up to a small, fixed set of bucket
 * sizes limits the number of distinct statement shapes that get generated.
 */
class BatchBucket {
	private static final int MAX_BATCH_SIZE = 1000;
	private static final Collection<Integer> predefinedBucketSizes = new TreeSet<Integer>();
	static {
		for (int size : new int[] {1, 10, 25, 50, 100, 500, MAX_BATCH_SIZE})
			predefinedBucketSizes.add(size);
	}

	// The sorted bucket sizes usable for this instance (capped at maxBatchSize).
	private final Collection<Integer> bucketSizes = new TreeSet<Integer>();
	private int maxBatchSize;

	/**
	 * Default Constructor. The default maximum batch size is currently 1000.
	 */
	public BatchBucket() {
		bucketSizes.addAll(predefinedBucketSizes);
		maxBatchSize = MAX_BATCH_SIZE;
	}

	/**
	 * Batch-size Constructor
	 *
	 * @param _nMaxBatchSize
	 *          - Integer specifying the maximum batch size. Values outside
	 *          (0, 1000] fall back to the default buckets and maximum.
	 */
	public BatchBucket(int _nMaxBatchSize) {
		if ((_nMaxBatchSize <= 0) || (_nMaxBatchSize > MAX_BATCH_SIZE)) {
			bucketSizes.addAll(predefinedBucketSizes);
			maxBatchSize = MAX_BATCH_SIZE;
			return;
		}
		// Keep every predefined bucket below the cap, then the cap itself
		// becomes the largest bucket.
		for (int predefined : predefinedBucketSizes) {
			if (predefined < _nMaxBatchSize)
				bucketSizes.add(predefined);
		}
		bucketSizes.add(_nMaxBatchSize);
		maxBatchSize = _nMaxBatchSize;
	}

	/**
	 * @return the calculated maximum batch size
	 */
	public int getMaxBatchSize() {
		return maxBatchSize;
	}

	/**
	 * Method to calculate the appropriate batch size for a specific current size
	 *
	 * @param _nCurSize
	 *          - Integer representing the current size of the statement
	 * @return the smallest bucket size that fits _nCurSize, or 0 if _nCurSize is
	 *         non-positive or exceeds every bucket
	 */
	public int getBatchSize(int _nCurSize) {
		if (_nCurSize <= 0)
			return 0;
		for (int bucket : bucketSizes) {
			if (_nCurSize <= bucket)
				return bucket;
		}
		return 0;
	}
}

View File

@@ -0,0 +1,148 @@
package com.lanternsoftware.util.dao.jdbc.preparedinstatement;
import java.sql.PreparedStatement;
import java.util.Iterator;
import java.util.LinkedList;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedParameter;
/**
 * Represents a single multi-column in-clause within a SQL statement.
 * Example with batch size 2:
 *   select * from table where (record_id, str_val) in (('1','2'),('3','4'))
 * is modeled as two {@link InClauseColumn}s:
 *   record_id with parameters '1', '3' and str_val with parameters '2', '4'.
 */
class InClause {
	// The columns participating in this in-clause, in declaration order.
	private final InClauseColumn[] columns;
	// Computes bucketed batch sizes; replaced whenever setMaxBatchSize is called.
	private BatchBucket batchBucket;
	// Index of the in-clause within the SQL statement.
	private final int startIdx;
	// If the clause has no parameters: true -> rewrite as '1=1', false -> '1=0'.
	private final boolean returnAllIfEmpty;
	/**
	 * Default Constructor
	 *
	 * @param _startIdx
	 *          - Integer representing the index of the in-clause inside the SQL statement
	 * @param _returnAllIfEmpty
	 *          - a boolean flag that determines the resulting behavior if the {@link InClauseColumn}s are empty. If true,
	 *          the in-clause will be replaced with a '1=1', meaning all rows would be returned; false will replace with
	 *          '1 = 0'.
	 * @param _columns
	 *          - a variable list of {@link InClauseColumn}s that make up the in-clause
	 */
	public InClause(int _startIdx, boolean _returnAllIfEmpty, InClauseColumn... _columns) {
		columns = _columns;
		startIdx = _startIdx;
		// 0 is out of range for BatchBucket, so this yields the default buckets.
		batchBucket = new BatchBucket(0);
		returnAllIfEmpty = _returnAllIfEmpty;
	}
	/**
	 * Method which defines a maximum batch size used to calculate in-clause batch sizes.
	 *
	 * @param _batchSize the new maximum batch size (values outside (0, 1000] fall back to the default)
	 */
	public void setMaxBatchSize(int _batchSize) {
		batchBucket = new BatchBucket(_batchSize);
	}
	/**
	 * Converts the columns of this in-clause into {@link InClauseBatchedParameter} batches that
	 * respect the database's in-clause item limit (the limit counts items across ALL columns,
	 * so with two columns each batch holds at most limit/2 rows).
	 *
	 * Special cases:
	 * 1. If the batch size is smaller than the column count, the batch grows to one full row.
	 * 2. If no parameters are set, the single batch carries the returnAllIfEmpty flag so the
	 *    clause is rewritten to '1=1' or '1=0'.
	 * 3. A single column with a single parameter is optimized to an equals (= ?) instead of in (?).
	 *
	 * @return a {@link LinkedList} of optimum-sized batched {@link PreparedParameter}s
	 */
	public LinkedList<InClauseBatchedParameter> getBatchedParameters() {
		LinkedList<InClauseBatchedParameter> listReturnParameters = new LinkedList<InClauseBatchedParameter>();
		if (columns == null || columns.length == 0)
			return listReturnParameters;
		int nColumnCnt = columns.length;
		int nCurrentSize = 0;
		InClauseBatchedParameter batchedParameters = new InClauseBatchedParameter(columns);
		listReturnParameters.add(batchedParameters);
		// special case 2: if there are no parameters, we will honor the behavior of returnAllIfEmpty
		// NOTE(review): only columns[0] is inspected here; this assumes every column carries the
		// same parameter count — confirm, since a later column with zero parameters would not be caught.
		if (nColumnCnt >= 1 && columns[0].getParameterCnt() == 0) {
			batchedParameters.setReturnAllIfEmpty(returnAllIfEmpty);
			return listReturnParameters;
		}
		// special case 3: if there is only 1 column and 1 element in the column, we can use = , not in()
		if (nColumnCnt == 1 && columns[0].getParameterCnt() == 1) {
			batchedParameters.addParameter(columns[0].getNextParameter());
			return listReturnParameters;
		}
		InClauseBatchedParameter lastBatchedParameters = null;
		boolean bMoreParameters = true;
		while (bMoreParameters) {
			// Start a new batch once adding one more full row of values would exceed the max batch size.
			if (/* special case 1 */nCurrentSize > 0 && (nCurrentSize + nColumnCnt) > batchBucket.getMaxBatchSize()) {
				batchedParameters = new InClauseBatchedParameter(columns);
				listReturnParameters.add(batchedParameters);
				nCurrentSize = 0;
			}
			// Capture the current row of values so the final batch can be padded below.
			lastBatchedParameters = new InClauseBatchedParameter(columns);
			for (InClauseColumn column : columns) {
				if (column == null)
					continue;
				// One value per column makes up a row; each value counts toward the batch size.
				PreparedParameter parameter = column.getNextParameter();
				batchedParameters.addParameter(parameter);
				lastBatchedParameters.addParameter(parameter);
				nCurrentSize++;
				if (!column.hasNextParameter())
					bMoreParameters = false;
			}
		}
		// Pad the final (partial) batch up to the next bucket size by repeating the last row of
		// values, so fewer distinct statement shapes need to be prepared.
		if (lastBatchedParameters != null) {
			int nBatchSize = batchBucket.getBatchSize(nCurrentSize);
			while (nCurrentSize < nBatchSize) {
				// if we are going to add more parameters than the bucket size, we need to break now
				if (nCurrentSize + nColumnCnt > nBatchSize)
					break;
				Iterator<PreparedParameter> iter = lastBatchedParameters.getParameters().iterator();
				while (iter.hasNext()) {
					batchedParameters.addParameter(iter.next());
					++nCurrentSize;
				}
			}
		}
		return listReturnParameters;
	}
	/**
	 * @return Integer representing the starting index in the SQL statement
	 */
	public int getStartIndex() {
		return startIdx;
	}
	/**
	 * Method to reset the clause after an evaluation (rewinds each column's parameter cursor)
	 */
	public void reset() {
		for (InClauseColumn column : columns) {
			if (column != null)
				column.reset();
		}
	}
}

View File

@@ -0,0 +1,90 @@
package com.lanternsoftware.util.dao.jdbc.preparedinstatement;
import java.sql.PreparedStatement;
import java.util.LinkedList;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedParameter;
/**
 * One batch of values to be bound into a {@link PreparedStatement} for an
 * in-clause. Example with batch size 2:
 *   select * from table where (record_id, str_val) in (('1','2'),('3','4'))
 * yields one batch holding the parameters '1', '2', '3', '4'. With batch size
 * 1 the same clause yields two batches: ('1', '2') and ('3', '4').
 */
public class InClauseBatchedParameter {
	private final LinkedList<PreparedParameter> parameters = new LinkedList<PreparedParameter>();
	private final InClauseColumn[] columns;
	// Count of parameters added through addParameter (the list itself is also
	// exposed via getParameters, so the two are tracked independently).
	private int size = 0;
	// When true and the batch is empty, the clause is rewritten to '1=1'; when false, '1=0'.
	private boolean returnAllIfEmpty = false;

	/**
	 * Default Constructor
	 *
	 * @param _arrColumns the {@link InClauseColumn}s that make up the in-clause
	 */
	public InClauseBatchedParameter(InClauseColumn[] _arrColumns) {
		columns = _arrColumns;
	}

	/**
	 * Appends a parameter to this batch.
	 *
	 * @param _parameter the {@link PreparedParameter} to be added
	 */
	public void addParameter(PreparedParameter _parameter) {
		parameters.add(_parameter);
		++size;
	}

	/**
	 * @param _bReturnAllIfEmpty true to rewrite an empty clause as '1=1' (return
	 *          all rows); false to rewrite it as '1 = 0'
	 */
	public void setReturnAllIfEmpty(boolean _bReturnAllIfEmpty) {
		returnAllIfEmpty = _bReturnAllIfEmpty;
	}

	/**
	 * @return the empty-clause behavior flag (see {@link #setReturnAllIfEmpty(boolean)})
	 */
	public boolean isReturnAllIfEmpty() {
		return returnAllIfEmpty;
	}

	/**
	 * @return the number of batched {@link PreparedParameter}s
	 */
	public int getBatchParameterCnt() {
		return size;
	}

	/**
	 * @return the number of {@link InClauseColumn}s behind this batch
	 */
	public int getColumnCnt() {
		return columns.length;
	}

	/**
	 * @return the array of {@link InClauseColumn}s behind this batch
	 */
	public InClauseColumn[] getColumns() {
		return columns;
	}

	/**
	 * @return the live {@link LinkedList} of batched {@link PreparedParameter}s
	 */
	public LinkedList<PreparedParameter> getParameters() {
		return parameters;
	}
}

View File

@@ -0,0 +1,148 @@
package com.lanternsoftware.util.dao.jdbc.preparedinstatement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Collects the {@link InClause}s of a SQL statement and expands them into the
 * full set of {@link InClauseStatement}s — one per combination of batched
 * parameters across all clauses — needed to execute the query within the
 * database's in-clause size limits.
 */
class InClauseBuilder {
	// In-clauses keyed by their starting index within the SQL statement.
	private final Map<Integer, InClause> inClauses = new HashMap<Integer, InClause>();
	private int maxBatchSize = 0;
	/**
	 * @param _clause
	 *          - {@link InClause} to be processed. Null clauses are ignored; a clause with the
	 *          same start index replaces any previously added clause.
	 */
	public void addClause(InClause _clause) {
		if (_clause != null) {
			inClauses.put(_clause.getStartIndex(), _clause);
		}
	}
	/**
	 * Method which defines a maximum batch size used to calculate in-clause batch sizes.
	 *
	 * @param _batchSize the maximum batch size applied to every clause during buildStatements()
	 */
	public void setMaxBatchSize(int _batchSize) {
		maxBatchSize = _batchSize;
	}
	/**
	 * Method to generate a Collection of {@link InClauseStatement}s to be executed. These statements represent all possible combinations of {@link InClauseBatchedParameter}s from all {@link InClause}s that will need to be executed in order to fulfill the set in-clauses.
	 *
	 * NOTE(review): inClauses is a HashMap, so the order in which clause combinations are
	 * produced is unspecified — confirm callers do not rely on statement order.
	 *
	 * @return Collection of {@link InClauseStatement}s to be executed. Will not return null, but return an empty Collection
	 */
	public Collection<InClauseStatement> buildStatements() {
		Collection<InClauseStatement> collStatements = new ArrayList<InClauseStatement>();
		if (inClauses.isEmpty()) {
			return collStatements;
		}
		// First pass: compute the batched parameters for each clause, keyed by start index.
		Map<Integer, LinkedList<InClauseBatchedParameter>> mapBatchedParameters = new HashMap<Integer, LinkedList<InClauseBatchedParameter>>();
		Iterator<Entry<Integer, InClause>> iter = inClauses.entrySet().iterator();
		while (iter.hasNext()) {
			Entry<Integer, InClause> entry = iter.next();
			if (entry == null) {
				continue;
			}
			int nStartIdx = entry.getKey();
			InClause inClause = entry.getValue();
			if (inClause == null) {
				continue;
			}
			inClause.setMaxBatchSize(maxBatchSize);
			LinkedList<InClauseBatchedParameter> batchedParameters = inClause.getBatchedParameters();
			mapBatchedParameters.put(nStartIdx, batchedParameters);
		}
		// Second pass: expand the per-clause batches into every cross-clause combination.
		buildInClauseStatements(mapBatchedParameters, collStatements);
		return collStatements;
	}
	// Recursively builds the cartesian product of batched parameters across clauses.
	// Consumes one clause (removed from _batchedParameters) per recursion level and
	// crosses its batches with the statements accumulated so far.
	private void buildInClauseStatements(Map<Integer, LinkedList<InClauseBatchedParameter>> _batchedParameters, Collection<InClauseStatement> _inStatements) {
		if (_batchedParameters.isEmpty()) {
			return;
		}
		// Take an arbitrary remaining clause and remove it from the work map.
		Entry<Integer, LinkedList<InClauseBatchedParameter>> entry = _batchedParameters.entrySet().iterator().next();
		int nStartIdx = entry.getKey();
		_batchedParameters.remove(nStartIdx);
		LinkedList<InClauseBatchedParameter> parameters = entry.getValue();
		if (parameters == null) {
			// NOTE(review): this aborts processing of ALL remaining clauses rather than
			// skipping just this one — confirm that is the intended behavior.
			return;
		}
		if (_inStatements.isEmpty()) {
			// First clause processed: seed one statement per batch.
			for (InClauseBatchedParameter parameter : parameters) {
				if (parameter == null) {
					continue;
				}
				InClauseStatement statement = new InClauseStatement();
				statement.setNextParameter(nStartIdx, parameter);
				_inStatements.add(statement);
			}
		}
		else {
			Collection<InClauseStatement> collNewStatements = new ArrayList<InClauseStatement>();
			Iterator<InClauseStatement> iter = _inStatements.iterator();
			while (iter.hasNext()) {
				InClauseStatement existingStatement = iter.next();
				if (existingStatement == null) {
					continue;
				}
				// we need to clone the statement because we'll add a parameter to the first statement,
				// but we need the original to add other combinations of the parameters
				InClauseStatement clonedStatement = existingStatement.clone();
				boolean bFirst = true;
				for (InClauseBatchedParameter parameter : parameters) {
					if (parameter == null) {
						continue;
					}
					/*
					 * if there's only 1 parameter, we can just add that parameter to the existing statements for additional parameters, we
					 * need to build all of the remaining combinations for the new parameter we the existing statements
					 */
					if (bFirst) {
						// First batch goes onto the existing statement in place.
						existingStatement.setNextParameter(nStartIdx, parameter);
						bFirst = false;
						continue;
					}
					else {
						// Remaining batches each get a fresh clone of the pre-modification statement.
						InClauseStatement newStatement = clonedStatement.clone();
						newStatement.setNextParameter(nStartIdx, parameter);
						collNewStatements.add(newStatement);
					}
				}
			}
			_inStatements.addAll(collNewStatements);
		}
		// Recurse on the remaining clauses.
		buildInClauseStatements(_batchedParameters, _inStatements);
	}
	/**
	 * Method to reset the builder (rewinds every clause, then clears all state)
	 */
	public void reset() {
		for (InClause clause : inClauses.values()) {
			if (clause != null) {
				clause.reset();
			}
		}
		inClauses.clear();
		maxBatchSize = 0;
	}
}

Some files were not shown because too many files have changed in this diff Show More