Initial Commit

This commit is contained in:
Mark Milligan
2021-01-14 16:28:24 -06:00
parent 21c28201c5
commit 1334c110ff
318 changed files with 24160 additions and 0 deletions

View File

@@ -0,0 +1,323 @@
package com.lanternsoftware.util.dao;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.ITransformer;
import com.lanternsoftware.util.NullUtils;
/**
 * Skeletal {@link IDaoProxy} implementation.  Implements the full matrix of query / save /
 * delete convenience overloads in terms of the abstract primitives supplied by concrete
 * proxies (queryForEntities, saveEntity, count, delete, getType), and manages a lazily
 * created thread pool for the *Async variants.
 */
public abstract class AbstractDaoProxy implements IDaoProxy {
	private ExecutorService executor;
	private int maxThreads = 50;
	protected QueryPreparer queryPreparer = null;

	/**
	 * Shuts down the async thread pool, if one was ever created.
	 * Synchronized: {@code executor} is lazily created under this lock in {@link #executor()},
	 * so an unsynchronized read here could race with pool creation.
	 */
	@Override
	public synchronized void shutdown() {
		if (executor != null)
			executor.shutdownNow();
	}

	/** Installs an optional hook that can rewrite queries before execution (see prepareQuery). */
	public void setQueryPreparer(QueryPreparer _queryPreparer) {
		queryPreparer = _queryPreparer;
	}

	// ---------------------------------------------------------------------
	// Synchronous queries: every overload funnels into the 6-argument form.
	// ---------------------------------------------------------------------
	@Override
	public <T> List<T> query(Class<T> _class, DaoQuery _query) {
		return query(_class, _query, (DaoSort) null);
	}

	@Override
	public <T> List<T> query(final Class<T> _class, DaoQuery _query, DaoSort _sort) {
		return query(_class, _query, null, _sort);
	}

	@Override
	public <T> List<T> query(Class<T> _class, DaoQuery _query, Collection<String> _fields) {
		return query(_class, _query, _fields, null);
	}

	@Override
	public <T> List<T> query(final Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		return query(_class, _query, _fields, _sort, 0, -1);
	}

	/**
	 * Runs the query against the table mapped to _class and deserializes the results.
	 * @param _first zero-based offset of the first result
	 * @param _count maximum number of results; -1 means unbounded
	 */
	@Override
	public <T> List<T> query(final Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _first, int _count) {
		return toObjects(queryForEntities(DaoSerializer.getTableName(_class, getType()), _query, _fields, _sort, _first, _count), _class);
	}

	// ---------------------------------------------------------------------
	// Async variants: executed on the lazily created pool via submit().
	// ---------------------------------------------------------------------
	@Override
	public <T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query) {
		return submit(new QueryExecution<T>(this, _class, _query));
	}

	@Override
	public <T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query, DaoSort _sort) {
		return submit(new QueryExecution<T>(this, _class, _query, _sort));
	}

	@Override
	public <T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields) {
		return submit(new QueryExecution<T>(this, _class, _query, _fields));
	}

	@Override
	public <T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		return submit(new QueryExecution<T>(this, _class, _query, _fields, _sort));
	}

	/** Runs the query asynchronously, then applies _finalizer to transform the results. */
	@Override
	public <T, V> Future<List<V>> queryWithFinalizer(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, QueryFinalizer<T, V> _finalizer) {
		return submit(new QueryFinalizerExecution<T, V>(this, _class, _query, _fields, _sort, _finalizer));
	}

	/** Returns one page of results plus the total match count (runs the query and a count). */
	@Override
	public <T> DaoPage<T> queryPage(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count) {
		return new DaoPage<T>(query(_class, _query, _fields, _sort, _offset, _count), count(_class, _query));
	}

	@Override
	public DaoPage<DaoEntity> queryForEntitiesPage(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count) {
		return new DaoPage<DaoEntity>(queryForEntities(_tableName, _query, _fields, _sort, _offset, _count), count(_tableName, _query));
	}

	// ---------------------------------------------------------------------
	// Single-result queries: limit the underlying query to one row.
	// ---------------------------------------------------------------------
	@Override
	public <T> T queryOne(Class<T> _class, DaoQuery _query) {
		return queryOne(_class, _query, null, null);
	}

	@Override
	public <T> T queryOne(Class<T> _class, DaoQuery _query, DaoSort _sort) {
		return queryOne(_class, _query, null, _sort);
	}

	@Override
	public <T> T queryOne(Class<T> _class, DaoQuery _query, Collection<String> _fields) {
		return queryOne(_class, _query, _fields, null);
	}

	/** @return the first match, or null when nothing matches */
	@Override
	public <T> T queryOne(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		return CollectionUtils.getFirst(query(_class, _query, _fields, _sort, 0, 1));
	}

	@Override
	public <T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query) {
		return submit(new QueryOneExecution<T>(this, _class, _query));
	}

	@Override
	public <T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query, DaoSort _sort) {
		return submit(new QueryOneExecution<T>(this, _class, _query, _sort));
	}

	@Override
	public <T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields) {
		return submit(new QueryOneExecution<T>(this, _class, _query, _fields));
	}

	@Override
	public <T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		return submit(new QueryOneExecution<T>(this, _class, _query, _fields, _sort));
	}

	// ---------------------------------------------------------------------
	// "Important" queries: restricted to the fields the serializer flags as important.
	// ---------------------------------------------------------------------
	@Override
	public <T> List<T> queryImportant(Class<T> _class, DaoQuery _query) {
		return queryImportant(_class, _query, null);
	}

	@Override
	public <T> List<T> queryImportant(Class<T> _class, DaoQuery _query, DaoSort _sort) {
		return query(_class, _query, DaoSerializer.getImportantFields(_class), _sort);
	}

	@Override
	public <T> List<T> queryImportant(Class<T> _class, DaoQuery _query, DaoSort _sort, int _first, int _count) {
		return query(_class, _query, DaoSerializer.getImportantFields(_class), _sort, _first, _count);
	}

	@Override
	public <T> Future<List<T>> queryImportantAsync(Class<T> _class, DaoQuery _query) {
		return queryAsync(_class, _query, DaoSerializer.getImportantFields(_class));
	}

	@Override
	public <T> Future<List<T>> queryImportantAsync(Class<T> _class, DaoQuery _query, DaoSort _sort) {
		return queryAsync(_class, _query, DaoSerializer.getImportantFields(_class), _sort);
	}

	@Override
	public <T> DaoPage<T> queryImportantPage(Class<T> _class, DaoQuery _query, DaoSort _sort, int _offset, int _count) {
		return new DaoPage<T>(queryImportant(_class, _query, _sort, _offset, _count), count(_class, _query));
	}

	/** Returns every row of the table mapped to _class (null query = no filter). */
	@Override
	public <T> List<T> queryAll(Class<T> _class) {
		return query(_class, null);
	}

	@Override
	public boolean exists(Class<?> _class, DaoQuery _query) {
		return exists(DaoSerializer.getTableName(_class, getType()), _query);
	}

	@Override
	public boolean exists(String _tableName, DaoQuery _query) {
		return count(_tableName, _query) > 0;
	}

	// ---------------------------------------------------------------------
	// Raw entity queries (no deserialization).
	// ---------------------------------------------------------------------
	@Override
	public List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query) {
		return queryForEntities(_tableName, _query, (DaoSort) null);
	}

	@Override
	public List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, Collection<String> _fields) {
		return queryForEntities(_tableName, _query, _fields, null);
	}

	@Override
	public List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, DaoSort _sort) {
		return queryForEntities(_tableName, _query, null, _sort);
	}

	@Override
	public List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		return queryForEntities(_tableName, _query, _fields, _sort, 0, -1);
	}

	@Override
	public DaoEntity queryForEntity(String _tableName, DaoQuery _query) {
		return CollectionUtils.getFirst(queryForEntities(_tableName, _query, null, null, 0, 1));
	}

	@Override
	public DaoEntity queryForEntity(String _tableName, DaoQuery _query, DaoSort _sort) {
		return CollectionUtils.getFirst(queryForEntities(_tableName, _query, null, _sort, 0, 1));
	}

	@Override
	public DaoEntity queryForEntity(String _tableName, DaoQuery _query, Collection<String> _fields) {
		return CollectionUtils.getFirst(queryForEntities(_tableName, _query, _fields, null, 0, 1));
	}

	@Override
	public DaoEntity queryForEntity(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		return CollectionUtils.getFirst(queryForEntities(_tableName, _query, _fields, _sort, 0, 1));
	}

	// ---------------------------------------------------------------------
	// Single-field projections, returned as strings.
	// ---------------------------------------------------------------------
	@Override
	public String queryForOneField(Class<?> _class, DaoQuery _query, String _field) {
		return CollectionUtils.getFirst(queryForField(_class, _query, _field));
	}

	@Override
	public List<String> queryForField(Class<?> _class, DaoQuery _query, String _field) {
		return queryForField(DaoSerializer.getTableName(_class, getType()), _query, _field);
	}

	@Override
	public List<String> queryForField(Class<?> _class, DaoQuery _query, final String _field, DaoSort _sort) {
		return CollectionUtils.transform(queryForEntities(DaoSerializer.getTableName(_class, getType()), _query, Arrays.asList(_field), _sort), new ITransformer<DaoEntity, String>() {
			@Override
			public String transform(DaoEntity _daoEntity) {
				return DaoSerializer.getString(_daoEntity, _field);
			}
		});
	}

	@Override
	public List<String> queryForField(String _tableName, DaoQuery _query, final String _field) {
		return CollectionUtils.transform(queryForEntities(_tableName, _query, Arrays.asList(_field)), new ITransformer<DaoEntity, String>() {
			@Override
			public String transform(DaoEntity _daoEntity) {
				return DaoSerializer.getString(_daoEntity, _field);
			}
		});
	}

	// ---------------------------------------------------------------------
	// Persistence.
	// ---------------------------------------------------------------------
	/** Serializes _object and saves it. @return the saved entity's id */
	@Override
	public String save(Object _object) {
		return saveEntity(_object.getClass(), DaoSerializer.toDaoEntity(_object, getType()));
	}

	/** Saves each object; objects whose save produced no id are omitted from the result map. */
	@Override
	public <T> Map<String, T> save(Collection<T> _objects) {
		Map<String, T> ids = new HashMap<>();
		for (T o : _objects) {
			String id = save(o);
			if (NullUtils.isNotEmpty(id))
				ids.put(id, o);
		}
		return ids;
	}

	@Override
	public Map<String, DaoEntity> save(Class<?> _class, Collection<DaoEntity> _entities) {
		Map<String, DaoEntity> ids = new HashMap<>();
		for (DaoEntity e : _entities) {
			ids.put(saveEntity(_class, e), e);
		}
		return ids;
	}

	@Override
	public boolean delete(Class<?> _class, DaoQuery _query) {
		return delete(DaoSerializer.getTableName(_class, getType()), _query);
	}

	@Override
	public int count(Class<?> _class, DaoQuery _query) {
		return count(DaoSerializer.getTableName(_class, getType()), _query);
	}

	// ---------------------------------------------------------------------
	// Executor management.
	// ---------------------------------------------------------------------
	private <T> Future<List<T>> submit(Callable<List<T>> _execution) {
		return executor().submit(_execution);
	}

	private <T> Future<T> submit(QueryOneExecution<T> _execution) {
		return executor().submit(_execution);
	}

	/**
	 * Sets the pool size used if/when the async executor is lazily created.
	 * Synchronized: {@code maxThreads} is read under the same lock in {@link #executor()}.
	 */
	public synchronized void setMaxThreads(int _maxThreads) {
		maxThreads = _maxThreads;
	}

	/** Injects an externally managed executor.  Synchronized so the write is visible to executor(). */
	@Override
	public synchronized void setExecutor(ExecutorService _executor) {
		executor = _executor;
	}

	/** Lazily creates the async executor on first use. */
	private synchronized ExecutorService executor() {
		if (executor == null)
			executor = Executors.newFixedThreadPool(maxThreads);
		return executor;
	}

	/** Deserializes a single entity into _class, or null for a null entity. */
	protected <T> T toObject(DaoEntity _entity, Class<T> _class) {
		return CollectionUtils.getFirst(toObjects(Collections.singletonList(_entity), _class));
	}

	/** Deserializes each entity into _class via the registered serializer for this proxy type. */
	protected <T> List<T> toObjects(List<DaoEntity> _entities, final Class<T> _class) {
		return CollectionUtils.transform(_entities, new ITransformer<DaoEntity, T>() {
			@Override
			public T transform(DaoEntity _daoEntity) {
				return DaoSerializer.fromDaoEntity(_daoEntity, _class, getType());
			}
		});
	}

	/** Applies the optional query preparer hook; returns _query unchanged when none is set. */
	protected DaoQuery prepareQuery(DaoQuery _query) {
		if (queryPreparer == null)
			return _query;
		return queryPreparer.prepareQuery(_query);
	}
}

View File

@@ -0,0 +1,253 @@
package com.lanternsoftware.util.dao;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.dao.annotations.CaseFormat;
import com.lanternsoftware.util.dao.annotations.DBClob;
import com.lanternsoftware.util.dao.annotations.DBIgnore;
import com.lanternsoftware.util.dao.annotations.DBName;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
import com.lanternsoftware.util.dao.annotations.DBType;
import com.lanternsoftware.util.dao.annotations.Important;
import com.lanternsoftware.util.dao.annotations.TimestampDates;
import com.lanternsoftware.util.dao.annotations.Unimportant;
/**
 * Base class for DAO serializers.  On construction it reflects over the supported class
 * (and its superclasses) to cache database field names, SQL types, per-annotation field
 * lists, and the "important" field subset used for partial queries.
 *
 * NOTE(review): the constructor invokes the overridable methods getSupportedClass() and
 * addFields(); subclasses must not depend on their own constructor-initialized state there.
 */
public abstract class AbstractDaoSerializer<T> implements IDaoSerializer<T> {
	protected final Map<Class<? extends Annotation>, List<String>> annotations = new HashMap<>();
	protected final List<String> importantFields = new ArrayList<>();
	protected final Map<String, Integer> fieldTypes = new HashMap<>();

	public AbstractDaoSerializer() {
		addFields(getSupportedClass());
	}

	/** Reflects over _class (recursing into its superclasses) and caches field metadata. */
	public void addFields(Class<?> _class) {
		if (_class == null) {
			return;
		}
		List<String> important = new ArrayList<>();
		List<String> unimportant = new ArrayList<>();
		List<String> normal = new ArrayList<>();
		for (Field f : _class.getDeclaredFields()) {
			if (!isSerializable(f))
				continue;
			String dbName = fieldToDatabaseName(f);
			if (f.isAnnotationPresent(Important.class))
				important.add(dbName);
			else if (f.isAnnotationPresent(Unimportant.class))
				unimportant.add(dbName);
			else
				normal.add(dbName);
			// Map the java field type to a java.sql.Types constant.
			// NOTE(review): this mapping uses f.getType() and ignores any @DBType override
			// (see getType(Field)) — confirm that is intended.
			// Fix: was isOneOf(f.getType(), Byte.TYPE, byte.class) — Byte.TYPE == byte.class,
			// so boxed Byte fields were never mapped; also joined into the else-if chain.
			if (NullUtils.isOneOf(f.getType(), Byte.TYPE, Byte.class))
				fieldTypes.put(dbName, Types.INTEGER);
			else if (NullUtils.isOneOf(f.getType(), Short.TYPE, Short.class))
				fieldTypes.put(dbName, Types.INTEGER);
			else if (NullUtils.isOneOf(f.getType(), Integer.TYPE, Integer.class))
				fieldTypes.put(dbName, Types.INTEGER);
			else if (NullUtils.isOneOf(f.getType(), Long.TYPE, Long.class))
				fieldTypes.put(dbName, Types.BIGINT);
			else if (NullUtils.isOneOf(f.getType(), Double.TYPE, Double.class, Float.TYPE, Float.class))
				fieldTypes.put(dbName, Types.DOUBLE);
			else if (NullUtils.isOneOf(f.getType(), Boolean.TYPE, Boolean.class))
				fieldTypes.put(dbName, Types.BIT);
			else if (f.getType().equals(String.class) || f.getType().isEnum()) {
				if (f.isAnnotationPresent(DBClob.class))
					fieldTypes.put(dbName, Types.CLOB);
				else
					fieldTypes.put(dbName, Types.VARCHAR);
			}
			else if (f.getType().equals(Date.class)) {
				// Dates are stored as SQL timestamps only when the class opts in via @TimestampDates;
				// otherwise they are stored as epoch millis in a BIGINT.
				if (DaoSerializer.isAnnotationPresent(_class, TimestampDates.class))
					fieldTypes.put(dbName, Types.TIMESTAMP);
				else
					fieldTypes.put(dbName, Types.BIGINT);
			}
			for (Annotation a : f.getAnnotations()) {
				CollectionUtils.addToMultiMap(a.annotationType(), dbName, annotations);
			}
		}
		// If any field is explicitly @Important, only those count; otherwise every field
		// not marked @Unimportant is considered important.
		if (!important.isEmpty())
			importantFields.addAll(important);
		else {
			normal.removeAll(unimportant);
			importantFields.addAll(normal);
		}
		addFields(_class.getSuperclass());
	}

	/** @return database names of fields carrying _fieldAnnotation (never null) */
	@Override
	public List<String> getFieldsByAnnotation(Class<? extends Annotation> _fieldAnnotation) {
		return CollectionUtils.makeNotNull(annotations.get(_fieldAnnotation));
	}

	@Override
	public List<String> getImportantFields() {
		return importantFields;
	}

	/** @return the cached java.sql.Types constant for _fieldName, or Types.NULL when unmapped */
	@Override
	public int getSqlType(String _fieldName) {
		Integer type = fieldTypes.get(_fieldName);
		if (type == null)
			return Types.NULL;
		return type;
	}

	/** Table name from @DBSerializable(name=...), falling back to the snake_cased class name. */
	@Override
	public String getTableName() {
		DBSerializable table = getSupportedClass().getAnnotation(DBSerializable.class);
		if ((table != null) && NullUtils.isNotEmpty(table.name()))
			return table.name();
		return getterNameToDatabaseName(getSupportedClass().getSimpleName());
	}

	/** Capitalizes the field name (e.g. "userId" -> "UserId"). */
	public static String fieldToGetterName(Field _field) {
		String name = _field.getName();
		return Character.toUpperCase(name.charAt(0)) + name.substring(1);
	}

	public static String getterNameToDatabaseName(String _name) {
		return toSnake(_name);
	}

	public static String getterNameToDatabaseName(String _name, CaseFormat _format) {
		return convertCase(_name, CaseFormat.PASCAL, _format);
	}

	/**
	 * Converts _name between naming conventions via a PascalCase intermediate.
	 * Returns _name unchanged when the formats match or the input is null/empty.
	 */
	public static String convertCase(String _name, CaseFormat _inFormat, CaseFormat _outFormat) {
		if (_inFormat == _outFormat)
			return _name;
		if ((_name == null) || _name.isEmpty())
			return _name; // guard: charAt(0) below would throw on empty input
		String pascal;
		if (_inFormat == CaseFormat.SNAKE)
			pascal = toPascal(_name);
		else if (_inFormat == CaseFormat.CAMEL)
			pascal = Character.toUpperCase(_name.charAt(0)) + _name.substring(1);
		else
			pascal = _name;
		if (_outFormat == CaseFormat.SNAKE)
			return toSnake(pascal);
		if (_outFormat == CaseFormat.CAMEL)
			return Character.toLowerCase(pascal.charAt(0)) + pascal.substring(1);
		return pascal;
	}

	/** Converts snake_case to PascalCase ("my_field" -> "MyField"). */
	private static String toPascal(String _snake) {
		StringBuilder field = new StringBuilder();
		boolean charWasWordStart = true;
		for (int i = 0; i < _snake.length(); i++) {
			if (_snake.charAt(i) == '_')
				charWasWordStart = true;
			else {
				field.append(charWasWordStart?Character.toUpperCase(_snake.charAt(i)):_snake.charAt(i));
				charWasWordStart = false;
			}
		}
		return field.toString();
	}

	/** Converts PascalCase to snake_case, keeping runs of capitals (acronyms) together. */
	private static String toSnake(String _pascal) {
		StringBuilder field = null;
		boolean charWasUpper = false;
		for (int i = 0; i < _pascal.length(); i++) {
			if (Character.isUpperCase(_pascal.charAt(i))) {
				if (field == null) {
					field = new StringBuilder();
					field.append(Character.toLowerCase(_pascal.charAt(i)));
				}
				else if (!charWasUpper) {
					field.append("_");
					field.append(Character.toLowerCase(_pascal.charAt(i)));
				}
				else {
					field.append(Character.toLowerCase(_pascal.charAt(i)));
				}
				charWasUpper = true;
			}
			else {
				charWasUpper = false;
				if (field == null) {
					field = new StringBuilder();
				}
				field.append(_pascal.charAt(i));
			}
		}
		// Fix: an empty input left "field" null and threw an NPE; return the input instead.
		return (field == null) ? _pascal : field.toString();
	}

	public static String fieldToDatabaseName(Field _field) {
		return fieldToDatabaseName(_field, CaseFormat.SNAKE);
	}

	/** Database column name: @DBName override, else the field name converted to _format. */
	public static String fieldToDatabaseName(Field _field, CaseFormat _format) {
		DBName name = _field.getAnnotation(DBName.class);
		if (name != null)
			return name.name();
		if (_format == CaseFormat.CAMEL)
			return _field.getName();
		return getterNameToDatabaseName(fieldToGetterName(_field), _format);
	}

	/** The field's effective type: the @DBType override when present, else the declared type. */
	public static Class<?> getType(Field _f) {
		DBType type = _f.getAnnotation(DBType.class);
		if (type != null)
			return type.type();
		return _f.getType();
	}

	public static boolean isSerializable(Field _f) {
		return isSerializable(_f, false);
	}

	/** A field is serializable unless static, transient, @DBIgnore, or (when _serializeObjects is false) needing a custom serializer. */
	public static boolean isSerializable(Field _f, boolean _serializeObjects) {
		if (Modifier.isStatic(_f.getModifiers()) || Modifier.isTransient(_f.getModifiers()) || _f.isAnnotationPresent(DBIgnore.class))
			return false;
		if (_serializeObjects)
			return true;
		return !requiresCustomSerializer(_f);
	}

	/** True when the field's (element) type is not a primitive, enum, or directly supported value type. */
	public static boolean requiresCustomSerializer(Field _f) {
		Class<?> type = getType(_f);
		if (Collection.class.isAssignableFrom(type))
			type = getCollectionType(_f);
		return !(type.isPrimitive() || type.isEnum() || NullUtils.isOneOf(type, String.class, Date.class, BigDecimal.class, byte[].class, Boolean.class, Double.class, Long.class, Integer.class, Float.class));
	}

	/** @return the element type of a generic Collection field, or null when it cannot be determined */
	public static Class<?> getCollectionType(Field _f) {
		if (Collection.class.isAssignableFrom(getType(_f)) && (_f.getGenericType() instanceof ParameterizedType)) {
			ParameterizedType t = (ParameterizedType) _f.getGenericType();
			if (t.getActualTypeArguments().length > 0) {
				Type t2 = t.getActualTypeArguments()[0];
				if (t2 instanceof Class)
					return (Class<?>)t2;
			}
		}
		return null;
	}

	/** Default: this serializer is not proxy-specific. */
	@Override
	public List<DaoProxyType> getSupportedProxies() {
		return Collections.emptyList();
	}

	/** Default: no index definitions. */
	@Override
	public List<DaoSort> getIndexes() {
		return null;
	}
}

View File

@@ -0,0 +1,79 @@
package com.lanternsoftware.util.dao;
import java.io.File;
import java.io.FileInputStream;
import java.lang.annotation.Annotation;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import org.apache.commons.io.IOUtils;
import com.lanternsoftware.util.NullUtils;
/**
 * Scans a source tree for .java files whose text mentions an annotation or superclass name,
 * then confirms the candidates via reflection.  Used for class discovery from source paths.
 */
public abstract class AnnotationFinder {
	/**
	 * Finds classes under _codePath annotated with _annotationClass.
	 * @return map of fully-qualified class name to the directory containing its source file
	 */
	public static Map<String, String> findAnnotatedClasses(String _codePath, Class<? extends Annotation> _annotationClass) {
		String annotationName = "@" + _annotationClass.getSimpleName();
		Map<String, String> mapClasses = new TreeMap<>();
		searchFile(new File(_codePath), mapClasses, annotationName);
		Iterator<String> classIter = mapClasses.keySet().iterator();
		while (classIter.hasNext()) {
			String className = classIter.next();
			try {
				Class<?> clazz = Class.forName(className);
				if (!clazz.isAnnotationPresent(_annotationClass))
					classIter.remove();
			} catch (ClassNotFoundException _e) {
				// NOTE(review): unloadable candidates are intentionally kept in the result —
				// confirm callers expect classes not on the classpath to survive this filter.
			}
		}
		return mapClasses;
	}

	/**
	 * Finds classes under _codePath assignable to _superclass.
	 * @return map of fully-qualified class name to the directory containing its source file
	 */
	public static Map<String, String> findSubclasses(String _codePath, Class<?> _superclass) {
		Map<String, String> mapClasses = new TreeMap<>();
		searchFile(new File(_codePath), mapClasses, _superclass.getSimpleName());
		Iterator<String> classIter = mapClasses.keySet().iterator();
		while (classIter.hasNext()) {
			String className = classIter.next();
			Class<?> clazz = NullUtils.getClass(className, _superclass);
			if (clazz == null)
				classIter.remove();
		}
		return mapClasses;
	}

	/**
	 * Recursively scans _f; for each .java file containing _searchString, extracts the
	 * package and public class name and records FQCN -> parent directory in _mapClasses.
	 */
	private static void searchFile(File _f, Map<String, String> _mapClasses, String _searchString) {
		if (_f == null) {
			return;
		}
		if (_f.isDirectory()) {
			File[] children = _f.listFiles();
			if (children == null) // listFiles() returns null on I/O error or access denial
				return;
			for (File child : children) {
				searchFile(child, _mapClasses, _searchString);
			}
		}
		else if (_f.getName().endsWith(".java")) {
			// try-with-resources: the FileInputStream was previously never closed (leak).
			try (FileInputStream is = new FileInputStream(_f)) {
				String source = IOUtils.toString(is);
				if (!source.contains(_searchString)) {
					return;
				}
				int packagePos = source.indexOf("package ");
				if (packagePos == -1)
					return; // default package / unparseable: cannot build a FQCN
				int packageEnd = source.indexOf(";", packagePos);
				if (packageEnd == -1)
					return;
				String packageName = source.substring(packagePos + 8, packageEnd);
				int classKeyword = source.indexOf("public class");
				if (classKeyword == -1)
					return; // fix: indexOf() == -1 previously yielded a garbage substring below
				int classPos = classKeyword + 12;
				while (source.charAt(classPos) == ' ')
					classPos++;
				// Class name ends at the first newline or space after the keyword.
				int newLineN = source.indexOf("\n", classPos);
				int newLineR = source.indexOf("\r", classPos);
				int space = source.indexOf(" ", classPos);
				int classEnd = NullUtils.min((newLineN == -1) ? Integer.MAX_VALUE : newLineN, (newLineR == -1) ? Integer.MAX_VALUE : newLineR, (space == -1) ? Integer.MAX_VALUE : space);
				String className = source.substring(classPos, classEnd);
				_mapClasses.put(packageName + "." + className, _f.getParent());
			}
			catch (Exception e) {
				e.printStackTrace(); // NOTE(review): consider routing through a logger
			}
		}
	}
}

View File

@@ -0,0 +1,100 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import org.bson.Document;
/**
 * A {@code Map<String, Object>} facade over a BSON {@link Document}, used as the wire
 * format between DAO serializers and the backing datastore proxies.  All Map operations
 * delegate to the wrapped document.
 */
public class DaoEntity implements Map<String, Object> {
	private final Document map;

	/** Creates an empty entity. */
	public DaoEntity() {
		map = new Document();
	}

	/** Wraps _doc directly (no copy); a null document yields an empty entity. */
	public DaoEntity(Document _doc) {
		map = _doc == null?new Document():_doc;
	}

	/** Copies all entries of _map into a new document; a null map yields an empty entity (was an NPE). */
	public DaoEntity(Map<String, ?> _map) {
		map = new Document();
		if (_map != null)
			map.putAll(_map);
	}

	/** Creates an entity with a single initial entry. */
	public DaoEntity(String _name, Object _o) {
		map = new Document();
		put(_name, _o);
	}

	/** Fluent put: adds the entry and returns this entity for chaining. */
	public DaoEntity and(String _name, Object _o) {
		put(_name, _o);
		return this;
	}

	@Override
	public int size() {
		return map.size();
	}

	@Override
	public boolean isEmpty() {
		return map.isEmpty();
	}

	@Override
	public boolean containsKey(Object key) {
		return map.containsKey(key);
	}

	@Override
	public boolean containsValue(Object value) {
		return map.containsValue(value);
	}

	@Override
	public Object get(Object key) {
		return map.get(key);
	}

	@Override
	public Object put(String key, Object value) {
		return map.put(key, value);
	}

	@Override
	public Object remove(Object key) {
		return map.remove(key);
	}

	@Override
	public void putAll(Map<? extends String, ?> m) {
		map.putAll(m);
	}

	@Override
	public void clear() {
		map.clear();
	}

	@Override
	public Set<String> keySet() {
		return map.keySet();
	}

	@Override
	public Collection<Object> values() {
		return map.values();
	}

	@Override
	public Set<Entry<String, Object>> entrySet() {
		return map.entrySet();
	}

	/** @return the backing document (a live view, not a copy) */
	public Document toDocument() {
		return map;
	}

	// Fix: java.util.Map requires value-based equals/hashCode (defined in terms of entrySet);
	// the previous identity-based defaults violated that contract.
	@Override
	public boolean equals(Object _o) {
		if (_o == this)
			return true;
		if (!(_o instanceof Map))
			return false;
		return entrySet().equals(((Map<?, ?>) _o).entrySet());
	}

	@Override
	public int hashCode() {
		return entrySet().hashCode();
	}

	@Override
	public String toString() {
		return map.toString();
	}
}

View File

@@ -0,0 +1,32 @@
package com.lanternsoftware.util.dao;
import java.util.List;
/**
 * One page of query results together with the total number of rows matching the query,
 * so callers can render paging controls.
 * @param <T> the result entity type
 */
public class DaoPage<T> {
	private List<T> results;
	private int totalResultCount;

	/** No-arg constructor; state can be populated via the setters. */
	public DaoPage() {
	}

	/**
	 * @param _results the entities for this page
	 * @param _totalResultCount the total match count across all pages
	 */
	public DaoPage(List<T> _results, int _totalResultCount) {
		this.results = _results;
		this.totalResultCount = _totalResultCount;
	}

	/** @return the entities for this page (may be null when unset) */
	public List<T> getResults() {
		return results;
	}

	public void setResults(List<T> _results) {
		this.results = _results;
	}

	/** @return the total number of matches across all pages */
	public int getTotalResultCount() {
		return totalResultCount;
	}

	public void setTotalResultCount(int _totalResultCount) {
		this.totalResultCount = _totalResultCount;
	}
}

View File

@@ -0,0 +1,10 @@
package com.lanternsoftware.util.dao;
/**
 * Identifies the datastore backend a DAO proxy targets.  Serializers advertise which
 * backends they support via IDaoSerializer.getSupportedProxies(), and proxies report
 * their own type so the matching serializer can be selected.
 */
public enum DaoProxyType {
	// NOTE: do not reorder — any code relying on ordinal() would silently change behavior.
	JDBC,
	SOLR,
	KUDU,
	MONGO,
	EPHEMERAL,
	DYNAMODB
}

View File

@@ -0,0 +1,211 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import java.util.Map;
import org.bson.Document;
/**
 * A fluent, Mongo-style filter document used by the DAO proxies.  Instance methods add
 * criteria to this query and return it for chaining; the static factories create a new
 * query seeded with a single criterion.  Operator keys (e.g. "$gt", "$in") are interpreted
 * by the individual proxy implementations.
 */
public class DaoQuery extends Document {
	public DaoQuery() {
	}

	public DaoQuery(Map<String, Object> map) {
		super(map);
	}

	public DaoQuery(String _name, Object _o) {
		put(_name, _o);
	}

	/** Adds an equality criterion and returns this query for chaining. */
	public DaoQuery and(String _name, Object _o) {
		put(_name, _o);
		return this;
	}

	/** Adds an "$or" sub-query. */
	public DaoQuery or(DaoQuery _query) {
		return and("$or", _query);
	}

	/** Adds a case-insensitive equality criterion. */
	public DaoQuery andIgnoreCase(String _name, Object _o) {
		return and(_name, new DaoQuery("$equalIgnoreCase", _o));
	}

	public DaoQuery andNotEquals(String _name, Object _o) {
		return and(_name, new DaoQuery("$ne", _o));
	}

	public DaoQuery andIn(String _name, Collection<String> _values) {
		return and(_name, new DaoQuery("$in", _values));
	}

	public DaoQuery andNotIn(String _name, Collection<String> _values) {
		return and(_name, new DaoQuery("$nin", _values));
	}

	public DaoQuery andInLongs(String _name, Collection<Long> _values) {
		return and(_name, new DaoQuery("$in", _values));
	}

	public DaoQuery andNotInLongs(String _name, Collection<Long> _values) {
		return and(_name, new DaoQuery("$nin", _values));
	}

	public DaoQuery andInIntegers(String _name, Collection<Integer> _values) {
		return and(_name, new DaoQuery("$in", _values));
	}

	public DaoQuery andNotInIntegers(String _name, Collection<Integer> _values) {
		return and(_name, new DaoQuery("$nin", _values));
	}

	public DaoQuery andGt(String _name, Object _o) {
		return and(_name, new DaoQuery("$gt", _o));
	}

	public DaoQuery andLt(String _name, Object _o) {
		return and(_name, new DaoQuery("$lt", _o));
	}

	public DaoQuery andGte(String _name, Object _o) {
		return and(_name, new DaoQuery("$gte", _o));
	}

	public DaoQuery andLte(String _name, Object _o) {
		return and(_name, new DaoQuery("$lte", _o));
	}

	/** Exclusive range: _lowerBound &lt; value &lt; _upperBound. */
	public DaoQuery andBetween(String _name, Object _lowerBound, Object _upperBound) {
		return and(_name, new DaoQuery("$gt", _lowerBound).and("$lt", _upperBound));
	}

	/** Inclusive range: _lowerBound &lt;= value &lt;= _upperBound. */
	public DaoQuery andBetweenInclusive(String _name, Object _lowerBound, Object _upperBound) {
		return and(_name, new DaoQuery("$gte", _lowerBound).and("$lte", _upperBound));
	}

	/** Half-open range: _lowerBound &lt;= value &lt; _upperBound. */
	public DaoQuery andBetweenInclusiveExclusive(String _name, Object _lowerBound, Object _upperBound) {
		return and(_name, new DaoQuery("$gte", _lowerBound).and("$lt", _upperBound));
	}

	/** Half-open range: _lowerBound &lt; value &lt;= _upperBound. */
	public DaoQuery andBetweenExclusiveInclusive(String _name, Object _lowerBound, Object _upperBound) {
		return and(_name, new DaoQuery("$gt", _lowerBound).and("$lte", _upperBound));
	}

	public DaoQuery andStartsWith(String _name, String _o) {
		return and(_name, new DaoQuery("$startsWith", _o));
	}

	public DaoQuery andStartsWithIgnoreCase(String _name, String _o) {
		return and(_name, new DaoQuery("$startsWithIgnoreCase", _o));
	}

	public DaoQuery andContains(String _name, String _o) {
		return and(_name, new DaoQuery("$contains", _o));
	}

	public DaoQuery andContainsIgnoreCase(String _name, String _o) {
		return and(_name, new DaoQuery("$containsIgnoreCase", _o));
	}

	/** Matches rows where _name is null/absent. */
	public DaoQuery andNull(String _name) {
		return and(_name, "$null");
	}

	/** Matches rows where _name is present and non-null. */
	public DaoQuery andNotNull(String _name) {
		return and(_name, "$notnull");
	}

	// ---------------------------------------------------------------------
	// Static factories: each creates a fresh query seeded with one criterion.
	// ---------------------------------------------------------------------
	public static DaoQuery notEquals(String _name, Object _value) {
		return new DaoQuery().andNotEquals(_name, _value);
	}

	public static DaoQuery in(String _name, Collection<String> _values) {
		return new DaoQuery().andIn(_name, _values);
	}

	public static DaoQuery notIn(String _name, Collection<String> _values) {
		return new DaoQuery().andNotIn(_name, _values);
	}

	public static DaoQuery inLongs(String _name, Collection<Long> _values) {
		return new DaoQuery().andInLongs(_name, _values);
	}

	public static DaoQuery notInLongs(String _name, Collection<Long> _values) {
		return new DaoQuery().andNotInLongs(_name, _values);
	}

	public static DaoQuery inIntegers(String _name, Collection<Integer> _values) {
		return new DaoQuery().andInIntegers(_name, _values);
	}

	public static DaoQuery notInIntegers(String _name, Collection<Integer> _values) {
		return new DaoQuery().andNotInIntegers(_name, _values);
	}

	public static DaoQuery gt(String _name, Object _value) {
		return new DaoQuery().andGt(_name, _value);
	}

	public static DaoQuery lt(String _name, Object _value) {
		return new DaoQuery().andLt(_name, _value);
	}

	public static DaoQuery gte(String _name, Object _value) {
		return new DaoQuery().andGte(_name, _value);
	}

	public static DaoQuery lte(String _name, Object _value) {
		return new DaoQuery().andLte(_name, _value);
	}

	public static DaoQuery between(String _name, Object _lowerBound, Object _upperBound) {
		return new DaoQuery().andBetween(_name, _lowerBound, _upperBound);
	}

	public static DaoQuery startsWith(String _name, String _value) {
		return new DaoQuery().andStartsWith(_name, _value);
	}

	public static DaoQuery startsWithIgnoreCase(String _name, String _value) {
		return new DaoQuery().andStartsWithIgnoreCase(_name, _value);
	}

	public static DaoQuery contains(String _name, String _value) {
		return new DaoQuery().andContains(_name, _value);
	}

	public static DaoQuery containsIgnoreCase(String _name, String _value) {
		return new DaoQuery().andContainsIgnoreCase(_name, _value);
	}

	public static DaoQuery isNull(String _name) {
		return new DaoQuery().andNull(_name);
	}

	public static DaoQuery notNull(String _name) {
		return new DaoQuery().andNotNull(_name);
	}
}

View File

@@ -0,0 +1,913 @@
package com.lanternsoftware.util.dao;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import org.bson.BsonBinaryReader;
import org.bson.BsonBinaryWriter;
import org.bson.Document;
import org.bson.codecs.BsonTypeClassMap;
import org.bson.codecs.BsonValueCodecProvider;
import org.bson.codecs.DecoderContext;
import org.bson.codecs.DocumentCodec;
import org.bson.codecs.DocumentCodecProvider;
import org.bson.codecs.EncoderContext;
import org.bson.codecs.IterableCodec;
import org.bson.codecs.ValueCodecProvider;
import org.bson.codecs.configuration.CodecRegistries;
import org.bson.io.BasicOutputBuffer;
import org.bson.json.Converter;
import org.bson.json.JsonReader;
import org.bson.json.JsonWriterSettings;
import org.bson.json.StrictJsonWriter;
import org.bson.types.Binary;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.DateUtils;
import com.lanternsoftware.util.ITransformer;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.ZipUtils;
import com.lanternsoftware.util.dao.annotations.DBIndex;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
import com.lanternsoftware.util.dao.annotations.PrimaryKey;
public class DaoSerializer {
private static final Logger LOG = LoggerFactory.getLogger(DaoSerializer.class);
private static final Map<Class<?>, List<IDaoSerializer>> serializers = new HashMap<>();
static {
for (IDaoSerializer serializer : ServiceLoader.load(IDaoSerializer.class)) {
CollectionUtils.addToMultiMap(serializer.getSupportedClass(), serializer, serializers);
}
}
/**
 * Registers a serializer at runtime, in addition to those discovered via ServiceLoader.
 * NOTE(review): the backing map is a plain HashMap; registering while other threads
 * call getSerializer is not thread-safe — confirm registration happens only at startup.
 */
public static void addSerializer(IDaoSerializer<?> _serializer) {
	CollectionUtils.addToMultiMap(_serializer.getSupportedClass(), _serializer, serializers);
}
/** Finds the serializer registered for _class with no proxy-type preference. */
public static <T> IDaoSerializer<T> getSerializer(Class<T> _class) {
	return getSerializer(_class, null);
}
/**
 * Finds the serializer registered for _class, preferring one that supports _proxyType.
 * Falls back to the first registered serializer when no proxy-specific one exists.
 * @return the best matching serializer, or null (with an error logged) when none is registered
 */
public static <T> IDaoSerializer<T> getSerializer(Class<T> _class, DaoProxyType _proxyType) {
	List<IDaoSerializer> candidates = serializers.get(_class);
	if (candidates == null) {
		LOG.error("No serializer exists for class " + _class.getCanonicalName());
		return null;
	}
	if (_proxyType != null) {
		for (IDaoSerializer candidate : candidates) {
			if (candidate.getSupportedProxies().contains(_proxyType))
				return candidate;
		}
	}
	return CollectionUtils.getFirst(candidates);
}
/** Serializes _o to a DaoEntity with no proxy-type preference. */
public static DaoEntity toDaoEntity(Object _o) {
	return toDaoEntity(_o, null);
}
public static DaoEntity toDaoEntity(Object _o, DaoProxyType _proxyType) {
if (_o == null) {
return null;
}
if (_o instanceof DaoEntity)
return (DaoEntity) _o;
if (_o instanceof Document)
return new DaoEntity((Document) _o);
IDaoSerializer serializer = getSerializer(_o.getClass(), _proxyType);
if (serializer == null)
return null;
try {
return serializer.toDaoEntity(_o);
}
catch (Exception _e) {
LOG.error("Failed to serialize entity", _e);
return null;
}
}
public static <T> T fromDaoEntity(DaoEntity _entity, Class<T> _class) {
return fromDaoEntity(_entity, _class, null);
}
public static <T> T fromDaoEntity(DaoEntity _entity, Class<T> _class, DaoProxyType _proxyType) {
if (_entity == null)
return null;
if (_class == DaoEntity.class)
return _class.cast(_entity);
if (_class == DaoQuery.class)
return _class.cast(new DaoQuery(_entity));
IDaoSerializer<T> serializer = getSerializer(_class, _proxyType);
if (serializer == null)
return null;
return serializer.fromDaoEntity(_entity);
}
public static String getTableName(Class<?> _class) {
return getTableName(_class, null);
}
public static String getTableName(Class<?> _class, DaoProxyType _proxyType) {
IDaoSerializer<?> serializer = getSerializer(_class, _proxyType);
if (serializer == null)
return null;
return serializer.getTableName();
}
public static List<String> getFieldsByAnnotation(Class<?> _entityClass, Class<? extends Annotation> _fieldAnnotation) {
return getFieldsByAnnotation(_entityClass, _fieldAnnotation, null);
}
public static List<String> getFieldsByAnnotation(Class<?> _entityClass, Class<? extends Annotation> _fieldAnnotation, DaoProxyType _proxyType) {
if (_entityClass == null) {
return Collections.emptyList();
}
IDaoSerializer<?> serializer = getSerializer(_entityClass, _proxyType);
if (serializer == null)
return Collections.emptyList();
return serializer.getFieldsByAnnotation(_fieldAnnotation);
}
public static List<String> getImportantFields(Class<?> _entityClass) {
if (_entityClass == null) {
return Collections.emptyList();
}
IDaoSerializer<?> serializer = getSerializer(_entityClass, null);
if (serializer == null)
return Collections.emptyList();
return serializer.getImportantFields();
}
public static int getSqlType(Class<?> _entityClass, String _fieldName) {
if (_entityClass == null) {
return Types.NULL;
}
IDaoSerializer<?> serializer = getSerializer(_entityClass, DaoProxyType.JDBC);
if (serializer == null) {
return Types.NULL;
}
return serializer.getSqlType(_fieldName);
}
public static int compare(DaoEntity _e, String _field, Object _comp) {
if (_comp instanceof String)
return NullUtils.compare(getString(_e, _field), (String) _comp);
if (_comp instanceof Date)
return NullUtils.compare(getDate(_e, _field), (Date) _comp);
if (_comp instanceof Long)
return NullUtils.compare(getLong(_e, _field), (Long) _comp);
if (_comp instanceof Short)
return NullUtils.compare(getShort(_e, _field), (Short) _comp);
if (_comp instanceof BigDecimal)
return NullUtils.compare(getBigDecimal(_e, _field), (BigDecimal) _comp);
if (_comp instanceof Double)
return NullUtils.compare(getDouble(_e, _field), (Double) _comp);
if (_comp instanceof Float)
return NullUtils.compare(getFloat(_e, _field), (Float) _comp);
if (_comp instanceof Integer)
return NullUtils.compare(getInteger(_e, _field), (Integer) _comp);
if (_comp instanceof Boolean)
return NullUtils.compare(getBoolean(_e, _field), (Boolean) _comp);
if (_comp instanceof Enum)
return NullUtils.compare(getString(_e, _field), ((Enum) _comp).name());
return 0;
}
public static String getString(DaoEntity _e, String _field) {
if (_e == null) {
return null;
}
return toString(_e.get(_field));
}
public static String toString(Object _o) {
if (_o instanceof String)
return (String) _o;
if (_o != null)
return String.valueOf(_o);
return null;
}
public static String getId(DaoEntity _e, Class<?> _entityClass) {
if (_e == null) {
return null;
}
String sPrimaryKeyField = CollectionUtils.getFirst(DaoSerializer.getFieldsByAnnotation(_entityClass, PrimaryKey.class));
if (NullUtils.isEmpty(sPrimaryKeyField)) {
sPrimaryKeyField = "_id";
}
return getString(_e, sPrimaryKeyField);
}
public static Date getDate(DaoEntity _e, String _field, long _lNullValue) {
if ((_e == null) || (!_e.containsKey(_field))) {
return null;
}
Object o = _e.get(_field);
if (o == null) {
return null;
}
if (o instanceof Timestamp) {
return new Date(((Timestamp) o).getTime());
}
if (o instanceof Date) {
return (Date) o;
}
long lDate = toLong(o);
if (lDate == _lNullValue) {
return null;
}
return new Date(lDate);
}
public static Date getDate(DaoEntity _e, String _sField) {
return getDate(_e, _sField, Long.MIN_VALUE);
}
public static Date getDate(DaoEntity _e, String _sField, String _format) {
return DateUtils.parse(_format, getString(_e, _sField));
}
public static Timestamp toTimestamp(Date _dt) {
if (_dt == null) {
return null;
}
return new Timestamp(_dt.getTime());
}
public static long toLong(Date _dt) {
return toLong(_dt, Long.MIN_VALUE);
}
public static long toLong(Date _dt, long _lNullValue) {
if (_dt == null) {
return _lNullValue;
}
return _dt.getTime();
}
public static short getShort(DaoEntity _e, String _sField) {
if (_e == null) {
return 0;
}
return toShort(_e.get(_sField));
}
public static short toShort(Object _o) {
try {
if (_o instanceof Short) {
return (Short) _o;
}
if (_o instanceof Integer) {
return ((Integer) _o).shortValue();
}
if (_o instanceof Long) {
return ((Long) _o).shortValue();
}
if (_o instanceof Double) {
return ((Double) _o).shortValue();
}
if (_o instanceof Boolean) {
return ((Boolean) _o) ? (short) 1 : (short) 0;
}
if (_o instanceof String) {
return Short.valueOf((String) _o);
}
return (short) 0;
}
catch (Exception _e) {
return (short) 0;
}
}
public static BigDecimal getBigDecimal(DaoEntity _e, String _sField) {
if (_e == null) {
return new BigDecimal(0);
}
return toBigDecimal(_e.get(_sField));
}
public static BigDecimal toBigDecimal(Object _o) {
try {
if (_o instanceof BigDecimal) {
return (BigDecimal) _o;
}
if (_o instanceof Double) {
return new BigDecimal((Double) _o);
}
if (_o instanceof Integer) {
return new BigDecimal((Integer) _o);
}
if (_o instanceof Short) {
return new BigDecimal((Short) _o);
}
if (_o instanceof Long) {
return new BigDecimal((Long) _o);
}
if (_o instanceof Boolean) {
return new BigDecimal(((Boolean) _o) ? 1 : 0);
}
if (_o instanceof String) {
return new BigDecimal((String) _o);
}
return new BigDecimal(0);
}
catch (Exception _e) {
return new BigDecimal(0);
}
}
public static double getDouble(DaoEntity _e, String _sField) {
if (_e == null) {
return 0.0;
}
return toDouble(_e.get(_sField));
}
public static double toDouble(Object _o) {
try {
if (_o instanceof Double) {
return ((Double) _o).doubleValue();
}
if (_o instanceof BigDecimal) {
return ((BigDecimal) _o).doubleValue();
}
if (_o instanceof Integer) {
return ((Integer) _o).doubleValue();
}
if (_o instanceof Short) {
return ((Short) _o).doubleValue();
}
if (_o instanceof Long) {
return ((Long) _o).doubleValue();
}
if (_o instanceof Boolean) {
return ((Boolean) _o) ? 1.0 : 0.0;
}
if (_o instanceof String) {
return Double.valueOf((String) _o);
}
return 0.0;
}
catch (Exception _e) {
return 0.0;
}
}
public static float getFloat(DaoEntity _e, String _sField) {
if (_e == null) {
return 0.0f;
}
return toFloat(_e.get(_sField));
}
public static float toFloat(Object _o) {
try {
if (_o instanceof Float) {
return ((Float) _o).floatValue();
}
if (_o instanceof Double) {
return ((Double) _o).floatValue();
}
if (_o instanceof BigDecimal) {
return ((BigDecimal) _o).floatValue();
}
if (_o instanceof Integer) {
return ((Integer) _o).floatValue();
}
if (_o instanceof Short) {
return ((Short) _o).floatValue();
}
if (_o instanceof Long) {
return ((Long) _o).floatValue();
}
if (_o instanceof Boolean) {
return ((Boolean) _o) ? 1.0f : 0.0f;
}
if (_o instanceof String) {
return Float.valueOf((String) _o);
}
return 0.0f;
}
catch (Exception _e) {
return 0.0f;
}
}
public static int getInteger(DaoEntity _e, String _sField) {
if (_e == null) {
return 0;
}
return toInteger(_e.get(_sField));
}
public static int toInteger(Object _o) {
try {
if (_o instanceof Integer) {
return (Integer) _o;
}
if (_o instanceof Short) {
return ((Short) _o).intValue();
}
if (_o instanceof Long) {
return ((Long) _o).intValue();
}
if (_o instanceof Double) {
return ((Double) _o).intValue();
}
if (_o instanceof BigDecimal) {
return ((BigDecimal) _o).intValue();
}
if (_o instanceof Boolean) {
return ((Boolean) _o) ? 1 : 0;
}
if (_o instanceof String) {
return Integer.valueOf((String) _o);
}
return 0;
}
catch (Exception _e) {
return 0;
}
}
public static long getLong(DaoEntity _e, String _sField) {
if (_e == null) {
return 0l;
}
return toLong(_e.get(_sField));
}
public static long toLong(Object _o) {
try {
if (_o instanceof Integer) {
return ((Integer) _o).longValue();
}
if (_o instanceof Short) {
return ((Short) _o).longValue();
}
if (_o instanceof Long) {
return (Long) _o;
}
if (_o instanceof BigDecimal) {
return ((BigDecimal) _o).longValue();
}
if (_o instanceof Double) {
return ((Double) _o).longValue();
}
if (_o instanceof Boolean) {
return ((Boolean) _o) ? 1L : 0L;
}
if (_o instanceof String) {
return Long.valueOf((String) _o);
}
return 0L;
}
catch (Exception _e) {
return 0L;
}
}
public static boolean getBoolean(DaoEntity _e, String _field) {
if (_e == null) {
return false;
}
return toBoolean(_e.get(_field));
}
public static boolean toBoolean(Object _o) {
return toBoolean(_o, false);
}
public static boolean toBoolean(Object _o, boolean _default) {
if (_o instanceof Boolean)
return (Boolean) _o;
if (_o instanceof String)
return ((String) _o).equalsIgnoreCase("true") || _o.equals("1");
if (_o instanceof Integer)
return ((Integer) _o) != 0;
if (_o instanceof Long)
return ((Long) _o) != 0;
if (_o instanceof BigDecimal)
return !(_o).equals(BigDecimal.ZERO);
return _default;
}
public static byte[] getByteArray(DaoEntity _e, String _sField) {
if (_e == null) {
return null;
}
Object o = _e.get(_sField);
if (o instanceof Binary)
return ((Binary)o).getData();
if (o instanceof byte[])
return (byte[]) o;
return null;
}
public static String toEnumName(Enum<?> _enum) {
if (_enum == null)
return "";
return _enum.name();
}
public static <T extends Enum<T>> List<String> toEnumNames(Collection<T> _enums) {
return CollectionUtils.transform(_enums, new ITransformer<T, String>() {
@Override
public String transform(T _enum) {
return toEnumName(_enum);
}
});
}
public static <T extends Enum<T>> T getEnum(DaoEntity _e, String _sField, Class<T> _enumType) {
return NullUtils.toEnum(_enumType, getString(_e, _sField));
}
public static <T extends Enum<T>> T getEnum(DaoEntity _e, String _sField, Class<T> _enumType, T _default) {
return NullUtils.toEnum(_enumType, getString(_e, _sField), _default);
}
public static <T extends Enum<T>> List<T> toEnums(Collection<String> _enumNames, final Class<T> _enumType) {
return CollectionUtils.transform(_enumNames, new ITransformer<String, T>() {
@Override
public T transform(String _s) {
return NullUtils.toEnum(_enumType, _s);
}
});
}
public static DaoEntity getDaoEntity(DaoEntity _e, String _field) {
if (_e == null)
return null;
return asDaoEntity(_e.get(_field));
}
public static DaoEntity asDaoEntity(Object _o) {
if (_o instanceof Document)
return new DaoEntity((Document) _o);
if (_o instanceof DaoEntity)
return (DaoEntity) _o;
return null;
}
public static <T> T getObject(DaoEntity _e, String _field, Class<T> _class) {
return getObject(_e, _field, _class, null);
}
public static <T> T getObject(DaoEntity _e, String _field, Class<T> _class, DaoProxyType _proxyType) {
return fromDaoEntity(getDaoEntity(_e, _field), _class, _proxyType);
}
public static List<DaoEntity> toDaoEntities(Collection<? extends Object> _objects) {
return toDaoEntities(_objects, null);
}
public static List<DaoEntity> toDaoEntities(Collection<? extends Object> _objects, DaoProxyType _proxyType) {
List<DaoEntity> entities = new ArrayList<>(CollectionUtils.size(_objects));
for (Object o : CollectionUtils.makeNotNull(_objects)) {
entities.add(toDaoEntity(o, _proxyType));
}
return entities;
}
public static List<DaoEntity> getDaoEntityList(DaoEntity _d, String _field) {
return getDaoEntityList(_d, _field, null);
}
public static List<DaoEntity> getDaoEntityList(DaoEntity _d, String _field, DaoProxyType _proxyType) {
Object list = (_d == null) ? null : _d.get(_field);
if (list instanceof Collection)
return toDaoEntities((Collection<?>) list, _proxyType);
return new ArrayList<>();
}
public static <T> List<T> getList(DaoEntity _d, String _sField, Class<T> _classOfT) {
return getList(_d, _sField, _classOfT, null);
}
public static <T> List<T> getList(DaoEntity _d, String _sField, Class<T> _classOfT, DaoProxyType _proxyType) {
if ((_d == null) || (!_d.containsKey(_sField)))
return new ArrayList<T>();
return fromList(_d.get(_sField), _classOfT, _proxyType);
}
public static <T> List<T> fromList(Object _list, Class<T> _classOfT) {
return fromList(_list, _classOfT, null);
}
public static <T> List<T> fromList(Object _list, Class<T> _classOfT, DaoProxyType _proxyType) {
if (_list instanceof List)
return fromList((List<?>) _list, _classOfT, _proxyType);
return new ArrayList<>();
}
public static <T> List<T> fromList(List<?> _list, Class<T> _classOfT) {
return fromList(_list, _classOfT, null);
}
public static <T> List<T> fromList(List<?> _list, Class<T> _classOfT, DaoProxyType _proxyType) {
List<T> objects = new ArrayList<>(CollectionUtils.size(_list));
for (Object object : CollectionUtils.makeNotNull(_list)) {
if (_classOfT.isInstance(object))
objects.add(_classOfT.cast(object));
else if (object instanceof Document)
objects.add(fromDaoEntity(new DaoEntity((Document) object), _classOfT, _proxyType));
else if (object instanceof DaoEntity)
objects.add(fromDaoEntity((DaoEntity) object, _classOfT, _proxyType));
}
return objects;
}
public static List<Field> getSerializableFields(Class<?> _class) {
return getSerializableFields(_class, false);
}
public static List<Field> getSerializableFields(Class<?> _class, boolean _serializeObjects) {
List<Field> fields = new ArrayList<Field>();
addSerializableFields(_class, fields, _serializeObjects);
return fields;
}
private static void addSerializableFields(Class<?> _class, List<Field> _fields, boolean _serializeObjects) {
if (_class == null) {
return;
}
for (Field field : _class.getDeclaredFields()) {
if (AbstractDaoSerializer.isSerializable(field, _serializeObjects))
_fields.add(field);
}
addSerializableFields(_class.getSuperclass(), _fields, _serializeObjects);
}
public static Set<String> getIndexedFields(Class<?> _class) {
return getIndexedFields(_class, null);
}
public static Set<String> getHashIndexFields(Class<?> _class) {
return getIndexedFields(_class, true);
}
public static Set<String> getRangeIndexFields(Class<?> _class) {
return getIndexedFields(_class, false);
}
private static Set<String> getIndexedFields(Class<?> _class, Boolean _hash) {
Set<String> indexedFields = new HashSet<String>();
DBSerializable def = _class.getAnnotation(DBSerializable.class);
if (def != null) {
for (DBIndex index : def.indexes()) {
if ((_hash == null) || (_hash == index.hash())) {
Collections.addAll(indexedFields, index.columns());
}
}
}
return indexedFields;
}
public static boolean isAnnotationPresent(Class<?> _class, Class<? extends Annotation> _fieldAnnotation) {
if (_class == null)
return false;
if (_class.isAnnotationPresent(_fieldAnnotation))
return true;
return isAnnotationPresent(_class.getSuperclass(), _fieldAnnotation);
}
public static <T extends Annotation> T getAnnotation(Class<?> _class, Class<T> _fieldAnnotation) {
if (_class == null)
return null;
T a = _class.getAnnotation(_fieldAnnotation);
if (a != null)
return a;
return getAnnotation(_class.getSuperclass(), _fieldAnnotation);
}
public static String toJson(Object _o) {
return toJson(_o, true);
}
public static String toSingleLineJson(Object _o) {
return toJson(toDaoEntity(_o), true, false);
}
public static String toSingleLineJson(DaoEntity _e) {
return toJson(_e, true, false);
}
public static byte[] toZipBson(Object _o) {
return toZipBson(toDaoEntity(_o));
}
public static byte[] toZipBson(DaoEntity _entity) {
if (_entity == null)
return null;
return ZipUtils.zip(toBson(_entity, true));
}
public static <T> T fromZipBson(byte[] _btZipBson, Class<T> _class) {
return DaoSerializer.fromDaoEntity(fromZipBson(_btZipBson), _class);
}
public static DaoEntity fromZipBson(byte[] _btZipBson) {
return fromBson(ZipUtils.unzip(_btZipBson));
}
public static <T> T fromBson(byte[] _btBson, Class<T> _class) {
return fromDaoEntity(fromBson(_btBson), _class);
}
public static DaoEntity fromBson(byte[] _btBson)
{
if (_btBson == null)
return null;
BsonBinaryReader reader = null;
try
{
reader = new BsonBinaryReader(ByteBuffer.wrap(_btBson).order(ByteOrder.LITTLE_ENDIAN));
Document doc = new DocumentCodec().decode(reader, DecoderContext.builder().build());
if (doc == null)
return null;
return new DaoEntity(doc);
}
catch (Throwable t)
{
LOG.error("Failed to convert bson to DaoEntity", t);
return null;
}
finally
{
if (reader != null)
reader.close();
}
}
public static byte[] toBson(Object _o) {
return toBson(toDaoEntity(_o));
}
public static byte[] toBson(Object _o, boolean _removeNulls) {
DaoEntity entity = toDaoEntity(_o);
if (_removeNulls)
removeNulls(entity.values());
return toBson(entity);
}
public static byte[] toBson(DaoEntity _entity) {
if (_entity == null)
return null;
BsonBinaryWriter writer = null;
try
{
BasicOutputBuffer buffer = new BasicOutputBuffer();
writer = new BsonBinaryWriter(buffer);
new DocumentCodec().encode(writer, _entity.toDocument(), EncoderContext.builder().build());
return buffer.toByteArray();
}
catch (Throwable t)
{
LOG.error("Failed to convert entity to BSON", t);
return null;
}
finally
{
if (writer != null)
writer.close();
}
}
public static String toJson(Object _o, boolean _removeNulls) {
return toJson(toDaoEntity(_o), _removeNulls);
}
public static String toJson(DaoEntity _e) {
return toJson(_e, true);
}
public static String toJson(DaoEntity _e, boolean _removeNulls) {
return toJson(_e, _removeNulls, true);
}
public static String toJson(DaoEntity _e, boolean _removeNulls, boolean _pretty) {
try {
if (_e != null) {
Document doc = _e.toDocument();
if (_removeNulls)
removeNulls(doc.values());
JsonWriterSettings.Builder settings = JsonWriterSettings.builder().int64Converter(new Converter<Long>() {
@Override
public void convert(Long _long, StrictJsonWriter _writer) {
if (_long != null)
_writer.writeNumber(_long.toString());
}
});
return doc.toJson(settings.indent(_pretty).build());
}
}
catch (Exception e) {
LOG.error("Failed to convert DaoEntity to json", e);
}
return null;
}
private static void removeNulls(Collection<Object> _doc) {
if (_doc == null)
return;
Iterator<Object> values = _doc.iterator();
while (values.hasNext()) {
Object o = values.next();
if (o == null)
values.remove();
else if (o instanceof DaoEntity)
removeNulls(((DaoEntity) o).values());
else if (o instanceof Document)
removeNulls(((Document) o).values());
else if (o instanceof Collection) {
Collection<Object> entities = (Collection<Object>) o;
removeNulls(entities);
if (entities.isEmpty())
values.remove();
}
}
}
public static String toJson(Collection<DaoEntity> _entities) {
StringBuilder b = null;
for (DaoEntity d : CollectionUtils.makeNotNull(_entities)) {
if (b == null)
b = new StringBuilder("[");
else
b.append(",");
b.append(toJson(d));
}
if (b == null)
return null;
b.append("]");
return b.toString();
}
public static <T> T parse(byte[] _json, Class<T> _class) {
return fromDaoEntity(parse(NullUtils.toString(_json)), _class);
}
public static <T> T parse(String _json, Class<T> _class) {
return fromDaoEntity(parse(_json), _class);
}
public static DaoEntity parse(String _json) {
if (NullUtils.isEmpty(_json))
return null;
try {
return new DaoEntity(Document.parse(_json));
}
catch (Exception _e) {
LOG.error("Failed to parse json", _e);
return null;
}
}
public static <T> List<T> parseList(String _json, Class<T> _class) {
return fromList(parseList(_json), _class);
}
public static List<DaoEntity> parseList(String _json) {
try {
List<DaoEntity> entities = new ArrayList<>();
JsonReader bsonReader = new JsonReader(_json);
for (Object o : new IterableCodec(CodecRegistries.fromProviders(Arrays.asList(new ValueCodecProvider(), new BsonValueCodecProvider(), new DocumentCodecProvider())), new BsonTypeClassMap()).decode(bsonReader, DecoderContext.builder().build())) {
if (o instanceof Document)
entities.add(new DaoEntity((Document) o));
}
return entities;
}
catch (Exception _e) {
LOG.error("Failed to parse json", _e);
return null;
}
}
public List<DaoSort> getIndexes(Class<?> _class) {
IDaoSerializer<?> serializer = getSerializer(_class);
return (serializer == null) ? new ArrayList<DaoSort>() : serializer.getIndexes();
}
}

View File

@@ -0,0 +1,15 @@
package com.lanternsoftware.util.dao;
/**
 * Unchecked exception thrown when DAO (de)serialization fails.
 */
public class DaoSerializerException extends RuntimeException {
	// Serializable subclasses should pin a serialVersionUID so serialized forms stay stable.
	private static final long serialVersionUID = 1L;

	public DaoSerializerException() {
	}

	public DaoSerializerException(String message) {
		super(message);
	}

	public DaoSerializerException(String message, Throwable cause) {
		super(message, cause);
	}

	public DaoSerializerException(Throwable cause) {
		super(cause);
	}
}

View File

@@ -0,0 +1,72 @@
package com.lanternsoftware.util.dao;
import java.util.ArrayList;
import java.util.List;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.ITransformer;
import com.lanternsoftware.util.dao.annotations.CaseFormat;
/**
 * Ordered, chainable list of sort fields for a DAO query, e.g.
 * {@code DaoSort.sort("last_name").thenDesc("created")}.
 */
public class DaoSort {
	private final List<DaoSortField> fields = new ArrayList<DaoSortField>();

	/** Appends an ascending, case-sensitive sort on {@code _field}. */
	public DaoSort then(String _field) {
		return then(_field, true);
	}

	/** Appends an ascending, case-insensitive sort on {@code _field}. */
	public DaoSort thenIgnoreCase(String _field) {
		return then(_field, true, true);
	}

	/** Appends a descending, case-sensitive sort on {@code _field}. */
	public DaoSort thenDesc(String _field) {
		return then(_field, false);
	}

	/** Appends a descending, case-insensitive sort on {@code _field}. */
	public DaoSort thenDescIgnoreCase(String _field) {
		return then(_field, false, true);
	}

	public DaoSort then(String _field, boolean _ascending) {
		return then(_field, _ascending, false);
	}

	public DaoSort then(String _field, boolean _ascending, boolean _ignoreCase) {
		// Bug fix: the _ignoreCase parameter was previously dropped (hard-coded false),
		// making thenIgnoreCase()/thenDescIgnoreCase() behave as case-sensitive sorts.
		fields.add(new DaoSortField(_field, _ascending, _ignoreCase));
		return this;
	}

	public static DaoSort sort(String _field) {
		return new DaoSort().then(_field);
	}

	public static DaoSort sortIgnoreCase(String _field) {
		return new DaoSort().thenIgnoreCase(_field);
	}

	public static DaoSort sortDesc(String _field) {
		return new DaoSort().thenDesc(_field);
	}

	public static DaoSort sortDescIgnoreCase(String _field) {
		return new DaoSort().thenDescIgnoreCase(_field);
	}

	public List<DaoSortField> getFields() {
		return fields;
	}

	public static DaoSort fromQueryParams(List<String> _queryParams) {
		return fromQueryParams(_queryParams, CaseFormat.CAMEL, CaseFormat.SNAKE);
	}

	/**
	 * Builds a sort from raw query-string params, converting each field name from
	 * {@code _paramFormat} to {@code _dbFormat}.
	 */
	public static DaoSort fromQueryParams(List<String> _queryParams, final CaseFormat _paramFormat, final CaseFormat _dbFormat) {
		DaoSort sort = new DaoSort();
		sort.fields.addAll(CollectionUtils.transform(_queryParams, new ITransformer<String, DaoSortField>() {
			@Override
			public DaoSortField transform(String _s) {
				return DaoSortField.fromQueryParam(_s, _paramFormat, _dbFormat);
			}
		}));
		return sort;
	}
}

View File

@@ -0,0 +1,53 @@
package com.lanternsoftware.util.dao;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.dao.annotations.CaseFormat;
/**
 * A single sort criterion: field name, direction, and case sensitivity.
 */
public class DaoSortField {
	private String field;
	private boolean ascending;
	private boolean ignoreCase;

	public DaoSortField() {
	}

	public DaoSortField(String _field, boolean _ascending, boolean _ignoreCase) {
		field = _field;
		ascending = _ascending;
		// Bug fix: _ignoreCase was previously accepted but never assigned, so
		// case-insensitive sorts silently behaved as case-sensitive.
		ignoreCase = _ignoreCase;
	}

	public String getField() {
		return field;
	}

	public void setField(String _field) {
		field = _field;
	}

	public boolean isAscending() {
		return ascending;
	}

	public void setAscending(boolean _ascending) {
		ascending = _ascending;
	}

	public boolean isIgnoreCase() {
		return ignoreCase;
	}

	public void setIgnoreCase(boolean _ignoreCase) {
		ignoreCase = _ignoreCase;
	}

	public static DaoSortField fromQueryParam(String _param) {
		return fromQueryParam(_param, CaseFormat.CAMEL, CaseFormat.SNAKE);
	}

	/**
	 * Parses a "field[,desc]" query-string token into a sort field, converting the field
	 * name between case formats. Returns null for an empty param. Always case-sensitive.
	 */
	public static DaoSortField fromQueryParam(String _param, CaseFormat _paramFormat, CaseFormat _dbFormat) {
		if (NullUtils.isEmpty(_param))
			return null;
		String[] parts = _param.split(",");
		return new DaoSortField(AbstractDaoSerializer.convertCase(parts[0], _paramFormat, _dbFormat), !(parts.length > 1 && NullUtils.isEqual(parts[1], "desc")), false);
	}
}

View File

@@ -0,0 +1,16 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import com.lanternsoftware.util.CollectionUtils;
/**
 * Base class for hooks that mutate entities in bulk before they are persisted or returned.
 * Subclasses implement {@link #prepareEntity(DaoEntity)} for a single entity.
 */
public abstract class EntityPreparer {
	/**
	 * Applies {@link #prepareEntity(DaoEntity)} to every entity in the collection
	 * (a null collection is tolerated) and returns the same collection for chaining.
	 */
	public Collection<DaoEntity> prepareEntities(Collection<DaoEntity> _entities) {
		if (_entities != null) {
			for (DaoEntity e : _entities)
				prepareEntity(e);
		}
		return _entities;
	}

	public abstract void prepareEntity(DaoEntity _entity);
}

View File

@@ -0,0 +1,66 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
/**
 * Data-access abstraction over a concrete backing store (the store is identified by
 * {@link #getType()}). Offers typed queries (serialized via DaoSerializer), raw
 * {@link DaoEntity} queries by table name, async variants returning {@link Future},
 * and save/update/delete/count operations.
 */
public interface IDaoProxy {
	/** Releases resources held by this proxy (connections, executors). */
	void shutdown();
	/** The backing-store type this proxy talks to. */
	DaoProxyType getType();

	// --- Typed queries (single result) ---
	<T> List<T> queryAll(Class<T> _class);
	<T> T queryOne(Class<T> _class, DaoQuery _query);
	<T> T queryOne(Class<T> _class, DaoQuery _query, DaoSort _sort);
	<T> T queryOne(Class<T> _class, DaoQuery _query, Collection<String> _fields);
	<T> T queryOne(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort);
	<T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query);
	<T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query, DaoSort _sort);
	<T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields);
	<T> Future<T> queryOneAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort);

	// --- Typed queries (lists, optionally restricted to _fields and/or sorted) ---
	<T> List<T> query(Class<T> _class, DaoQuery _query);
	<T> List<T> query(Class<T> _class, DaoQuery _query, DaoSort _sort);
	<T> List<T> query(Class<T> _class, DaoQuery _query, Collection<String> _fields);
	<T> List<T> query(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort);
	<T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query);
	<T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query, DaoSort _sort);
	<T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields);
	<T> Future<List<T>> queryAsync(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort);
	/** Async query whose results are post-processed by {@code _finalizer} (I -&gt; V) before being returned. */
	<T, V> Future<List<V>> queryWithFinalizer(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, QueryFinalizer<T, V> _finalizer);
	// Paged variants: _offset is the first row to return, _count the page size.
	<T> List<T> query(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count);
	<T> DaoPage<T> queryPage(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count);

	// --- "Important fields" queries (fields limited to the serializer's important-field set) ---
	<T> List<T> queryImportant(Class<T> _class, DaoQuery _query);
	<T> List<T> queryImportant(Class<T> _class, DaoQuery _query, DaoSort _sort);
	<T> Future<List<T>> queryImportantAsync(Class<T> _class, DaoQuery _query);
	<T> Future<List<T>> queryImportantAsync(Class<T> _class, DaoQuery _query, DaoSort _sort);
	<T> List<T> queryImportant(Class<T> _class, DaoQuery _query, DaoSort _sort, int _offset, int _count);
	<T> DaoPage<T> queryImportantPage(Class<T> _class, DaoQuery _query, DaoSort _sort, int _offset, int _count);

	// --- Raw entity queries addressed by table/collection name ---
	DaoEntity queryForEntity(String _tableName, DaoQuery _query);
	DaoEntity queryForEntity(String _tableName, DaoQuery _query, DaoSort _sort);
	DaoEntity queryForEntity(String _tableName, DaoQuery _query, Collection<String> _fields);
	DaoEntity queryForEntity(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort);
	DaoPage<DaoEntity> queryForEntitiesPage(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count);
	List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query);
	List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, DaoSort _sort);
	List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, Collection<String> _fields);
	List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort);
	List<DaoEntity> queryForEntities(String _tableName, DaoQuery _query, Collection<String> _fields, DaoSort _sort, int _offset, int _count);

	// --- Single-column projections (values returned as strings) ---
	String queryForOneField(Class<?> _class, DaoQuery _query, String _field);
	List<String> queryForField(Class<?> _class, DaoQuery _query, String _field);
	List<String> queryForField(Class<?> _class, DaoQuery _query, String _field, DaoSort _sort);
	List<String> queryForField(String _tableName, DaoQuery _query, String _field);

	// --- Writes: save returns the primary key(s) of the stored record(s) ---
	String save(Object _object);
	<T> Map<String, T> save(Collection<T> _objects);
	Map<String, DaoEntity> save(Class<?> _class, Collection<DaoEntity> _entities);
	void update(Class<?> _class, DaoQuery _query, DaoEntity _changes);
	<T> T updateOne(Class<T> _class, DaoQuery _query, DaoEntity _changes);
	String saveEntity(Class<?> _class, DaoEntity _entity);
	String saveEntity(String _collection, DaoEntity _entity);

	// --- Deletes, counts, existence checks ---
	boolean delete(Class<?> _class, DaoQuery _query);
	boolean delete(String _tableName, DaoQuery _query);
	int count(Class<?> _class, DaoQuery _query);
	int count(String _tableName, DaoQuery _query);
	boolean exists(Class<?> _class, DaoQuery _query);
	boolean exists(String _tableName, DaoQuery _query);

	/** Supplies the executor used for the *Async methods. */
	void setExecutor(ExecutorService _executor);
}

View File

@@ -0,0 +1,16 @@
package com.lanternsoftware.util.dao;
import java.lang.annotation.Annotation;
import java.util.List;
/**
 * Contract for converting one entity class {@code T} to and from {@link DaoEntity},
 * plus the schema metadata (table name, annotated fields, SQL types, indexes) the
 * proxies need. Implementations are discovered via ServiceLoader by DaoSerializer.
 *
 * @param <T> the entity class this serializer handles
 */
public interface IDaoSerializer<T> {
	/** The entity class this serializer supports. */
	Class<T> getSupportedClass();
	/** Table/collection name the entity is stored under. */
	String getTableName();
	/** Names of entity fields carrying the given annotation. */
	List<String> getFieldsByAnnotation(Class<? extends Annotation> _fieldAnnotation);
	/** Field subset used by the queryImportant* proxy methods. */
	List<String> getImportantFields();
	/** java.sql.Types constant for the named field (JDBC proxies). */
	int getSqlType(String _fieldName);
	DaoEntity toDaoEntity(T _t);
	T fromDaoEntity(DaoEntity _entity);
	/** Proxy types (e.g. MONGO, JDBC) whose storage format this serializer produces. */
	List<DaoProxyType> getSupportedProxies();
	/** Indexes to ensure for the entity's table, expressed as sorts. */
	List<DaoSort> getIndexes();
}

View File

@@ -0,0 +1,38 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
/**
 * Deferred list query: captures the proxy and query arguments so the lookup can run
 * later on an executor thread (see IDaoProxy.queryAsync).
 *
 * @param <V> the entity type returned by the query
 */
public class QueryExecution<V> implements Callable<List<V>> {
	private final IDaoProxy daoProxy;
	private final Class<V> entityClass;
	private final DaoQuery daoQuery;
	private final Collection<String> fieldNames;
	private final DaoSort resultSort;

	public QueryExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query) {
		this(_proxy, _class, _query, null, null);
	}

	public QueryExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query, DaoSort _sort) {
		this(_proxy, _class, _query, null, _sort);
	}

	public QueryExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query, Collection<String> _fields) {
		this(_proxy, _class, _query, _fields, null);
	}

	public QueryExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		daoProxy = _proxy;
		entityClass = _class;
		daoQuery = _query;
		fieldNames = _fields;
		resultSort = _sort;
	}

	/** Runs the captured query against the captured proxy. */
	@Override
	public List<V> call() throws Exception {
		return daoProxy.query(entityClass, daoQuery, fieldNames, resultSort);
	}
}

View File

@@ -0,0 +1,7 @@
package com.lanternsoftware.util.dao;
import java.util.List;
/**
 * Post-processing step for async queries: transforms the raw query results (I) into the
 * final result type (O), with the proxy available for follow-up lookups.
 *
 * @param <I> input element type (as returned by the query)
 * @param <O> output element type (after finalization)
 */
public abstract class QueryFinalizer<I, O> {
	// NOTE(review): this does NOT override Object.finalize() (different signature), but the
	// name is easy to confuse with the GC hook; renaming would break existing subclasses.
	public abstract List<O> finalize(IDaoProxy _proxy, List<I> _input);
}

View File

@@ -0,0 +1,28 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
/**
 * Deferred query plus post-processing: runs the captured query on an executor thread,
 * then hands the raw results to the captured {@link QueryFinalizer} to produce the
 * final output list.
 *
 * @param <I> entity type returned by the query
 * @param <O> element type produced by the finalizer
 */
public class QueryFinalizerExecution<I,O> implements Callable<List<O>> {
	private final IDaoProxy daoProxy;
	private final Class<I> entityClass;
	private final DaoQuery daoQuery;
	private final Collection<String> fieldNames;
	private final DaoSort resultSort;
	private final QueryFinalizer<I, O> resultFinalizer;

	public QueryFinalizerExecution(IDaoProxy _proxy, Class<I> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort, QueryFinalizer<I,O> _finalizer) {
		daoProxy = _proxy;
		entityClass = _class;
		daoQuery = _query;
		fieldNames = _fields;
		resultSort = _sort;
		resultFinalizer = _finalizer;
	}

	/** Runs the query, then finalizes the results in the same call. */
	@Override
	public List<O> call() {
		List<I> rawResults = daoProxy.query(entityClass, daoQuery, fieldNames, resultSort);
		return resultFinalizer.finalize(daoProxy, rawResults);
	}
}

View File

@@ -0,0 +1,37 @@
package com.lanternsoftware.util.dao;
import java.util.Collection;
import java.util.concurrent.Callable;
/**
 * A {@link Callable} that captures the arguments of a single-result lookup so it can be
 * executed later on an executor thread via {@link IDaoProxy#queryOne}.
 */
public class QueryOneExecution<V> implements Callable<V> {
	private final IDaoProxy proxy;
	private final Class<V> clazz;
	private final DaoQuery query;
	private final Collection<String> fields;
	private final DaoSort sort;

	/** Query with no field restriction and no sort. */
	public QueryOneExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query) {
		this(_proxy, _class, _query, null, null);
	}

	/** Sorted query returning all fields. */
	public QueryOneExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query, DaoSort _sort) {
		this(_proxy, _class, _query, null, _sort);
	}

	/** Unsorted query restricted to the given fields. */
	public QueryOneExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query, Collection<String> _fields) {
		this(_proxy, _class, _query, _fields, null);
	}

	public QueryOneExecution(IDaoProxy _proxy, Class<V> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
		proxy = _proxy;
		clazz = _class;
		query = _query;
		fields = _fields;
		sort = _sort;
	}

	/** Executes the captured lookup against the captured proxy. */
	@Override
	public V call() throws Exception {
		V result = proxy.queryOne(clazz, query, fields, sort);
		return result;
	}
}

View File

@@ -0,0 +1,50 @@
package com.lanternsoftware.util.dao;
import java.util.Map;
import java.util.Map.Entry;
import com.lanternsoftware.util.dao.annotations.CaseFormat;
/**
 * Rewrites the field names of a {@link DaoQuery} from the caller's naming convention
 * into the convention used by the underlying database.  Translation happens in order:
 * whole-name replacements first (short-circuiting), then case-format conversion,
 * then suffix substitutions.
 */
public class QueryPreparer {
	private final CaseFormat queryCaseFormat;
	private final CaseFormat dbCaseFormat;
	private final Map<String, String> fieldReplacements;
	private final Map<String, String> fieldSuffixExceptions;

	/** Case conversion only; no replacements or suffix exceptions. */
	public QueryPreparer(CaseFormat _queryCaseFormat, CaseFormat _dbCaseFormat) {
		this(_queryCaseFormat, _dbCaseFormat, null);
	}

	/** Case conversion plus whole-name replacements; no suffix exceptions. */
	public QueryPreparer(CaseFormat _queryCaseFormat, CaseFormat _dbCaseFormat, Map<String, String> _fieldReplacements) {
		this(_queryCaseFormat, _dbCaseFormat, _fieldReplacements, null);
	}

	public QueryPreparer(CaseFormat _queryCaseFormat, CaseFormat _dbCaseFormat, Map<String, String> _fieldReplacements, Map<String, String> _fieldSuffixExceptions) {
		queryCaseFormat = _queryCaseFormat;
		dbCaseFormat = _dbCaseFormat;
		fieldReplacements = _fieldReplacements;
		fieldSuffixExceptions = _fieldSuffixExceptions;
	}

	/**
	 * Returns a new query with every key translated to its database name.
	 * The input query is not modified.
	 */
	public DaoQuery prepareQuery(DaoQuery _query) {
		DaoQuery prepared = new DaoQuery();
		for (Entry<String, Object> entry : _query.entrySet()) {
			// An exact-match replacement short-circuits all other translation.
			String replacement = (fieldReplacements == null) ? null : fieldReplacements.get(entry.getKey());
			if (replacement != null) {
				prepared.put(replacement, entry.getValue());
				continue;
			}
			String field = AbstractDaoSerializer.convertCase(entry.getKey(), queryCaseFormat, dbCaseFormat);
			if (fieldSuffixExceptions != null) {
				// Every matching suffix rule is applied, in map iteration order.
				for (Entry<String, String> suffix : fieldSuffixExceptions.entrySet()) {
					if (field.endsWith(suffix.getKey()))
						field = field.substring(0, field.length() - suffix.getKey().length()) + suffix.getValue();
				}
			}
			prepared.put(field, entry.getValue());
		}
		return prepared;
	}
}

View File

@@ -0,0 +1,7 @@
package com.lanternsoftware.util.dao.annotations;
/**
 * Naming conventions used when translating between java field names and database
 * column names (see {@code AbstractDaoSerializer.convertCase} and {@code QueryPreparer}).
 */
public enum CaseFormat {
	SNAKE,  // lower case words separated by underscores, e.g. my_field
	CAMEL,  // first word lower case, subsequent words capitalized, e.g. myField
	PASCAL  // every word capitalized, e.g. MyField
}

View File

@@ -0,0 +1,11 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a String field that should be stored in a CLOB column rather than a bounded
 * VARCHAR when the database schema is generated (see {@code SchemaGenerator}).
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DBClob {
}

View File

@@ -0,0 +1,12 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a field to be excluded from DAO serialization — presumably skipped by
 * {@code DaoSerializer.getSerializableFields}; confirm in that implementation.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DBIgnore {
	// NOTE(review): a "name" attribute on an ignore marker looks unused — confirm whether it can be removed.
	String name() default "";
}

View File

@@ -0,0 +1,14 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.List;
/**
 * Describes a database index over one or more columns.  Used as the element type of
 * {@link DBSerializable#indexes()}.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DBIndex {
	/** The database column names the index covers. */
	String[] columns();
	/** NOTE(review): presumably requests a hashed index (e.g. MongoDB hashed index) — confirm in the proxy implementations. */
	boolean hash() default false;
}

View File

@@ -0,0 +1,12 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Overrides the database column name derived from the java field name — presumably
 * consulted by {@code AbstractDaoSerializer.fieldToDatabaseName}; confirm there.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DBName {
	/** The explicit column name; empty means derive from the field name. */
	String name() default "";
}

View File

@@ -0,0 +1,16 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a class for DAO persistence.  Classes carrying this annotation are discovered by
 * {@code AnnotationFinder} and have serializers generated for them by
 * {@code DaoSerializerGenerator}, schemas by {@code SchemaGenerator}.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface DBSerializable {
	/** The table/collection name; empty means derive it from the class name. */
	String name() default "";
	/** NOTE(review): presumably the database sequence used for key generation — confirm in the JDBC proxy. */
	String seq() default "";
	/** The case convention of the database column names (defaults to snake_case). */
	CaseFormat caseFormat() default CaseFormat.SNAKE;
	/** Indexes to create for this table/collection. */
	DBIndex[] indexes() default {};
	/** If false, {@code DaoSerializerGenerator} will not write a serializer source file for this class. */
	boolean autogen() default true;
}

View File

@@ -0,0 +1,12 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Overrides the database type used to persist a field — presumably consulted during
 * serialization/schema generation; confirm in DaoSerializer.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface DBType {
	/** The type to persist the field as (defaults to String). */
	Class<?> type() default String.class;
}

View File

@@ -0,0 +1,11 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marker for fields of elevated significance.  NOTE(review): no consumer of this
 * annotation is visible in this file — confirm its semantics where it is read.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface Important {
}

View File

@@ -0,0 +1,11 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a field that should be written on insert but never modified afterward —
 * presumably excluded from UPDATE statements; confirm in the proxy implementations.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface NeverUpdate {
}

View File

@@ -0,0 +1,11 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks the field(s) composing the table's primary key.  {@code SchemaGenerator} emits
 * PRIMARY KEY constraints from it, and {@code DaoSerializerGenerator} maps the annotated
 * field to Mongo's "_id".
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface PrimaryKey {
}

View File

@@ -0,0 +1,12 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a serializable type whose {@code java.util.Date} fields are persisted as
 * formatted strings rather than epoch millis or SQL timestamps
 * (see {@code DaoSerializerGenerator}, which emits DateUtils.format(FORMAT, ...) calls).
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface StringDates {
	/**
	 * The date pattern used to render/parse dates.
	 * Uses {@code HH} (0-23 hour-of-day) rather than {@code hh} (1-12 clock hour):
	 * the pattern carries no AM/PM marker, so with {@code hh} every afternoon time
	 * would be stored ambiguously and parsed back into the morning.
	 */
	String format() default "yyyy-MM-dd'T'HH:mm:ss.SSS";
}

View File

@@ -0,0 +1,11 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a serializable type whose {@code java.util.Date} fields are persisted as SQL
 * TIMESTAMP values ({@code SchemaGenerator} emits TIMESTAMP columns, and
 * {@code DaoSerializerGenerator} emits DaoSerializer.toTimestamp calls) instead of
 * the default epoch-millis longs.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface TimestampDates {
}

View File

@@ -0,0 +1,11 @@
package com.lanternsoftware.util.dao.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marker for fields of reduced significance (counterpart of {@code Important}).
 * NOTE(review): no consumer of this annotation is visible in this file — confirm its
 * semantics where it is read.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface Unimportant {
}

View File

@@ -0,0 +1,137 @@
package com.lanternsoftware.util.dao.csv;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;

import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.ITransformer;
import com.lanternsoftware.util.dao.DaoEntity;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.IDaoSerializer;
/**
 * Streams objects out of a CSV {@link InputStream} one row at a time.  The first row is
 * read as a header row; each data row is mapped into a {@link DaoEntity} keyed by header
 * name and deserialized with the supplied {@link IDaoSerializer}.  Optional metadata
 * entries are copied into every row's entity before deserialization, and an optional
 * transformer can rewrite header names (e.g. to match serializer field names).
 * <p>
 * NOTE(review): rows are split on commas with surrounding whitespace trimmed; quoted
 * fields containing commas are not supported.
 */
public abstract class CSVStream {
	protected static final Logger LOG = LoggerFactory.getLogger(CSVStream.class);

	public static <T> Iterator<T> parse(InputStream _is, Class<T> _class) {
		return new CSVIterator<T>(_is, DaoSerializer.getSerializer(_class));
	}

	public static <T> Iterator<T> parse(InputStream _is, Class<T> _class, DaoEntity _entity) {
		return new CSVIterator<T>(_is, DaoSerializer.getSerializer(_class), _entity);
	}

	public static <T> Iterator<T> parse(InputStream _is, Class<T> _class, ITransformer<String, String> _headerTransformer) {
		return new CSVIterator<T>(_is, DaoSerializer.getSerializer(_class), _headerTransformer);
	}

	public static <T> Iterator<T> parse(InputStream _is, Class<T> _class, DaoEntity _entity, ITransformer<String, String> _headerTransformer) {
		return new CSVIterator<T>(_is, DaoSerializer.getSerializer(_class), _entity, _headerTransformer);
	}

	public static <T> Iterator<T> parse(InputStream _is, IDaoSerializer<T> _serializer) {
		return new CSVIterator<T>(_is, _serializer);
	}

	public static <T> Iterator<T> parse(InputStream _is, IDaoSerializer<T> _serializer, DaoEntity _entity) {
		return new CSVIterator<T>(_is, _serializer, _entity);
	}

	public static <T> Iterator<T> parse(InputStream _is, IDaoSerializer<T> _serializer, ITransformer<String, String> _headerTransformer) {
		return new CSVIterator<T>(_is, _serializer, _headerTransformer);
	}

	public static <T> Iterator<T> parse(InputStream _is, IDaoSerializer<T> _serializer, DaoEntity _entity, ITransformer<String, String> _headerTransformer) {
		return new CSVIterator<T>(_is, _serializer, _entity, _headerTransformer);
	}

	private static class CSVIterator<T> implements Iterator<T> {
		private final BufferedReader reader;
		private final String[] headers;     // header row, possibly rewritten by the transformer
		private final IDaoSerializer<T> serializer;
		private final DaoEntity metadata;   // extra entries copied into every row's entity (may be null)
		private String[] line = null;       // next unconsumed data row; null once the stream is exhausted

		public CSVIterator(InputStream _is, IDaoSerializer<T> _serializer) {
			this(_is, _serializer, null, null);
		}

		public CSVIterator(InputStream _is, IDaoSerializer<T> _serializer, DaoEntity _metadata) {
			this(_is, _serializer, _metadata, null);
		}

		public CSVIterator(InputStream _is, IDaoSerializer<T> _serializer, ITransformer<String, String> _headerTransformer) {
			this(_is, _serializer, null, _headerTransformer);
		}

		public CSVIterator(InputStream _is, IDaoSerializer<T> _serializer, DaoEntity _metadata, ITransformer<String, String> _headerTransformer) {
			reader = new BufferedReader(new InputStreamReader(_is));
			headers = line();
			if ((_headerTransformer != null) && (headers != null)) {
				for (int i = 0; i < headers.length; i++) {
					headers[i] = _headerTransformer.transform(headers[i]);
				}
			}
			line = line();  // pre-read the first data row so hasNext() is a simple null check
			serializer = _serializer;
			metadata = _metadata;
		}

		@Override
		public boolean hasNext() {
			// Closing here lets callers that exhaust the iterator avoid managing the stream themselves.
			if (line == null) {
				IOUtils.closeQuietly(reader);
				return false;
			}
			return true;
		}

		@Override
		public T next() {
			// BUGFIX: honor the Iterator contract instead of deserializing an empty row.
			if (line == null)
				throw new NoSuchElementException();
			DaoEntity entity = new DaoEntity();
			if (metadata != null) {
				Set<Map.Entry<String, Object>> entryset = metadata.entrySet();
				for (Map.Entry<String, Object> entry : entryset) {
					entity.put(entry.getKey(), entry.getValue());
				}
			}
			for (int i = 0; i < headers.length; i++) {
				entity.put(headers[i], CollectionUtils.get(line, i));
			}
			T t;
			try {
				t = serializer.fromDaoEntity(entity);
			}
			catch (RuntimeException e) {
				line = line();  // advance past the bad row so the caller can keep iterating after catching
				throw e;
			}
			line = line();
			return t;
		}

		@Override
		public void remove() {
			// BUGFIX: was a silent no-op; the Iterator contract requires this exception when unsupported.
			throw new UnsupportedOperationException("CSV streams are read-only");
		}

		/** Reads and splits the next line, or returns null at end of stream / on I/O failure. */
		private String[] line() {
			try {
				String line = reader.readLine();
				if (line == null)
					return null;
				// BUGFIX: limit of -1 preserves trailing empty columns, which split() otherwise drops.
				return line.split("\\s*,\\s*", -1);
			} catch (IOException _e) {
				LOG.error("Failed to parse CSV", _e);
				return null;
			}
		}
	}
}

View File

@@ -0,0 +1,379 @@
package com.lanternsoftware.util.dao.generator;
import java.io.File;
import java.io.FileOutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.lanternsoftware.util.dao.AbstractDaoSerializer;
import com.lanternsoftware.util.dao.AnnotationFinder;
import com.lanternsoftware.util.dao.DaoEntity;
import com.lanternsoftware.util.dao.DaoProxyType;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.annotations.CaseFormat;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
import com.lanternsoftware.util.dao.annotations.PrimaryKey;
import com.lanternsoftware.util.dao.annotations.StringDates;
import com.lanternsoftware.util.dao.annotations.TimestampDates;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.DateUtils;
import com.lanternsoftware.util.ITransformer;
import com.lanternsoftware.util.NullUtils;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Generates java source for DAO serializers ({@code IDaoSerializer} implementations) for
 * every class annotated with {@link DBSerializable} under a source tree, and optionally
 * writes the META-INF/services SPI file that registers the generated serializers.
 */
public class DaoSerializerGenerator {
	private static final Logger LOG = LoggerFactory.getLogger(DaoSerializerGenerator.class);

	public static void generateSerializers(String _codePath) {
		generateSerializers(_codePath, false, null);
	}

	public static void generateSerializers(String _codePath, boolean _serializeNestedObjects, List<DaoProxyType> _proxyTypes) {
		generateSerializers(_codePath, _serializeNestedObjects, _proxyTypes, true);
	}

	public static void generateSerializers(String _codePath, boolean _serializeNestedObjects, List<DaoProxyType> _proxyTypes, boolean _generateSpiFile) {
		if (CollectionUtils.isEmpty(_proxyTypes))
			_proxyTypes = Collections.singletonList(DaoProxyType.MONGO);
		Map<String, List<String>> serializers = new HashMap<>();
		for (Map.Entry<String, String> e : AnnotationFinder.findAnnotatedClasses(_codePath, DBSerializable.class).entrySet()) {
			SerializerGenerationResult result = generateSerializer(e.getKey(), e.getValue() + File.separator + "dao" + File.separator, _serializeNestedObjects, null, _proxyTypes, null);
			if (result == null)
				continue;
			// Group generated class names by their module's resources/META-INF/services folder.
			// idx + 9 keeps the path through "src" plus the next folder (e.g. "src/main") -
			// assumes a maven-style source layout, TODO confirm for other layouts.
			int idx = e.getValue().indexOf(File.separator + "src" + File.separator);
			if (idx > -1)
				CollectionUtils.addToMultiMap(e.getValue().substring(0, idx + 9) + File.separator + "resources" + File.separator + "META-INF" + File.separator + "services" + File.separator, result.getClassName(), serializers);
		}
		if (_generateSpiFile) {
			for (Entry<String, List<String>> entry : serializers.entrySet()) {
				new File(entry.getKey()).mkdirs();
				FileOutputStream f = null;
				try {
					f = new FileOutputStream(entry.getKey() + "com.lanternsoftware.util.dao.IDaoSerializer");
					Collections.sort(entry.getValue());
					for (String className : entry.getValue()) {
						f.write(NullUtils.toByteArray(className));
						f.write(NullUtils.toByteArray("\r\n"));
					}
				}
				catch (Exception e) {
					LOG.error("Failed to create service loader file", e);
				}
				finally {
					IOUtils.closeQuietly(f);
				}
			}
		}
	}

	/**
	 * Generates the serializer source for a single class.
	 * @param _className
	 *            The name of the class to be serialized
	 * @param _outputPath
	 *            The path to write the generated serializer to
	 * @param _serializeNestedObjects
	 *            if true, the serializer will save a hierarchical structure containing all sub objects. If false, it only serializes primitives
	 * @param _fieldNameExceptions
	 *            a mapping of standard names to actual names to handle special cases where existing objects aren't following proper naming conventions
	 * @param _intendedProxyTypes
	 *            a list of proxy types that can use this generated serializer (can pass null if it should be used for all proxies)
	 * @param _primaryKey
	 *            The database primary key field. For Mongo, this will be changed to "_id" in the serializer.
	 * @return The generation result (generated serializer class name plus any field types
	 *         needing custom serializers), or null if the class could not be loaded or written
	 */
	public static SerializerGenerationResult generateSerializer(String _className, String _outputPath, boolean _serializeNestedObjects, Map<String, String> _fieldNameExceptions, List<DaoProxyType> _intendedProxyTypes, String _primaryKey) {
		try {
			Set<Class<?>> customSerializerFields = new HashSet<>();
			Class<?> clazz = Class.forName(_className);
			// Derive the generated class's package from the output path when it sits under a
			// java source root; otherwise nest a "dao" package under the entity's package.
			String packagePath;
			int srcPos = _outputPath.indexOf(File.separator + "java" + File.separator);
			if (srcPos > -1) {
				packagePath = _outputPath.substring(srcPos + 6).replace(File.separator, ".");
				if (packagePath.endsWith("."))
					packagePath = packagePath.substring(0, packagePath.length() - 1);
			}
			else
				packagePath = clazz.getPackage().getName() + ".dao";
			StringBuilder serializer = new StringBuilder();
			serializer.append("package ");
			serializer.append(packagePath);
			serializer.append(";\n\n");
			// Collect the imports the generated source will need.
			Set<Class<?>> imports = CollectionUtils.asHashSet(AbstractDaoSerializer.class, DaoEntity.class, DaoSerializer.class, clazz);
			if (CollectionUtils.isNotEmpty(_intendedProxyTypes)) {
				imports.add(DaoProxyType.class);
				imports.add(List.class);
				if (_intendedProxyTypes.size() > 1)
					imports.add(Arrays.class);
				else
					imports.add(Collections.class);
			}
			Map<String, List<Field>> mapFields = new HashMap<>();
			List<Field> fields = DaoSerializer.getSerializableFields(clazz, _serializeNestedObjects);
			for (Field f : fields) {
				for (Annotation a : f.getAnnotations()) {
					CollectionUtils.addToMultiMap(a.annotationType().getCanonicalName(), f, mapFields);
				}
				Class<?> type = AbstractDaoSerializer.getType(f);
				if (Collection.class.isAssignableFrom(type)) {
					Class<?> elementClass = ((Class<?>) (((ParameterizedType) f.getGenericType()).getActualTypeArguments()[0]));
					imports.add(elementClass);
				}
				else if (type.isEnum() || AbstractDaoSerializer.requiresCustomSerializer(f))
					imports.add(type);
			}
			CaseFormat caseFormat = CaseFormat.SNAKE;
			DBSerializable dbSerializable = DaoSerializer.getAnnotation(clazz, DBSerializable.class);
			if ((dbSerializable != null) && (dbSerializable.caseFormat() != null))
				caseFormat = dbSerializable.caseFormat();
			StringDates dateFormat = DaoSerializer.getAnnotation(clazz, StringDates.class);
			if (dateFormat != null)
				imports.add(DateUtils.class);
			Field primaryKey = CollectionUtils.getFirst(mapFields.get(PrimaryKey.class.getCanonicalName()));
			if (primaryKey != null) {
				if (primaryKey.getType() != String.class)
					imports.add(NullUtils.class);
				if (NullUtils.isEmpty(_primaryKey))
					_primaryKey = AbstractDaoSerializer.fieldToDatabaseName(primaryKey, caseFormat);
			}
			List<String> imp = CollectionUtils.transform(imports, new ITransformer<Class<?>, String>() {
				@Override
				public String transform(Class<?> _class) {
					return "import " + _class.getCanonicalName() + ";\n";
				}
			});
			Collections.sort(imp);
			for (String i : imp) {
				serializer.append(i);
			}
			serializer.append("\npublic class ");
			serializer.append(clazz.getSimpleName());
			serializer.append("Serializer extends AbstractDaoSerializer<");
			serializer.append(clazz.getSimpleName());
			serializer.append(">\n{\n");
			if (dateFormat != null) {
				serializer.append("\tprivate static final String FORMAT = \"");
				serializer.append(dateFormat.format());
				serializer.append("\";\n\n");
			}
			serializer.append("\t@Override\n\tpublic Class<");
			serializer.append(clazz.getSimpleName());
			serializer.append("> getSupportedClass()\n\t{\n\t\treturn ");
			serializer.append(clazz.getSimpleName());
			serializer.append(".class;\n\t}\n\n");
			// BUGFIX: the javadoc allows passing null for _intendedProxyTypes, but the first
			// element was previously dereferenced unconditionally (NullPointerException).
			String intendedType = CollectionUtils.isEmpty(_intendedProxyTypes) ? null : "DaoProxyType." + CollectionUtils.getFirst(_intendedProxyTypes).name();
			if (CollectionUtils.isNotEmpty(_intendedProxyTypes)) {
				serializer.append("\t@Override\n\tpublic List<DaoProxyType> getSupportedProxies() {\n\t\t");
				if (_intendedProxyTypes.size() > 1) {
					serializer.append("return Arrays.asList(");
					serializer.append(CollectionUtils.transformToCommaSeparated(_intendedProxyTypes, new ITransformer<DaoProxyType, String>() {
						@Override
						public String transform(DaoProxyType _daoProxyType) {
							return "DaoProxyType." + _daoProxyType.name();
						}
					}));
					serializer.append(");\n\t}\n");
				}
				else {
					serializer.append("return Collections.singletonList(");
					serializer.append(intendedType);
					serializer.append(");\n\t}\n\n");
				}
			}
			serializer.append("\t@Override\n\tpublic DaoEntity toDaoEntity(");
			serializer.append(clazz.getSimpleName());
			serializer.append(" _o)\n\t{\n\t\tDaoEntity d = new DaoEntity(");
			serializer.append(");\n");
			// "from" accumulates the fromDaoEntity method in parallel with toDaoEntity.
			StringBuilder from = new StringBuilder();
			from.append("\t@Override\n\tpublic ");
			from.append(clazz.getSimpleName());
			from.append(" fromDaoEntity(DaoEntity _d)\n\t{\n\t\t");
			from.append(clazz.getSimpleName());
			from.append(" o = new ");
			from.append(clazz.getSimpleName());
			from.append("();\n");
			for (Field f : fields) {
				String databaseField = AbstractDaoSerializer.fieldToDatabaseName(f, caseFormat);
				boolean customSerializer = AbstractDaoSerializer.requiresCustomSerializer(f);
				Class<?> type = AbstractDaoSerializer.getType(f);
				String classField = AbstractDaoSerializer.fieldToGetterName(f);
				if (_fieldNameExceptions != null) {
					String override = _fieldNameExceptions.get(classField);
					if (NullUtils.isNotEmpty(override))
						classField = override;
				}
				Class<?> collType = null;
				if (Collection.class.isAssignableFrom(type))
					collType = ((Class<?>) (((ParameterizedType) f.getGenericType()).getActualTypeArguments()[0]));
				// BUGFIX: null-safe check - _intendedProxyTypes may legitimately be null.
				boolean mongoPrimaryKey = NullUtils.isEqual(databaseField, _primaryKey) && (_intendedProxyTypes != null) && _intendedProxyTypes.contains(DaoProxyType.MONGO);
				if (mongoPrimaryKey) {
					// Mongo stores the primary key under "_id"; only write it when non-null so Mongo can autogenerate.
					databaseField = "_id";
					serializer.append("\t\tif (_o.get");
					serializer.append(classField);
					serializer.append("() != null)");
					serializer.append("\n\t\t\td.put(\"");
					serializer.append(databaseField);
					serializer.append("\", _o.get");
					serializer.append(classField);
					from.append("\t\to.set");
					from.append(classField);
					serializer.append("());\n");
					from.append("(DaoSerializer.getString(_d, \"");
					from.append(databaseField);
					from.append("\"));\n");
				}
				else {
					serializer.append("\t\td.put(\"");
					serializer.append(databaseField);
					if (customSerializer) {
						// Nested objects/collections round-trip through DaoEntity via their own serializers.
						if (Collection.class.isAssignableFrom(type)) {
							serializer.append("\", DaoSerializer.toDaoEntities(_o.get");
							serializer.append(classField);
							customSerializerFields.add(collType);
						}
						else {
							serializer.append("\", DaoSerializer.toDaoEntity(_o.get");
							customSerializerFields.add(type);
							serializer.append(classField);
						}
						if (NullUtils.isEmpty(intendedType))
							serializer.append("()));\n");
						else {
							serializer.append("(), ");
							serializer.append(intendedType);
							serializer.append("));\n");
						}
					}
					else if (type.equals(Date.class)) {
						// Dates are persisted as formatted strings, SQL timestamps, or epoch millis.
						if (dateFormat != null) {
							serializer.append("\", DateUtils.format(FORMAT, _o.get");
						}
						else if (DaoSerializer.isAnnotationPresent(clazz, TimestampDates.class))
							serializer.append("\", DaoSerializer.toTimestamp(_o.get");
						else
							serializer.append("\", DaoSerializer.toLong(_o.get");
						serializer.append(classField);
						serializer.append("()));\n");
					}
					else if (type.isEnum()) {
						serializer.append("\", DaoSerializer.toEnumName(_o.get");
						serializer.append(classField);
						serializer.append("()));\n");
					}
					else {
						if (type.getName().equals("boolean")) {
							serializer.append("\", _o.is");
						}
						else {
							serializer.append("\", _o.get");
						}
						serializer.append(classField);
						serializer.append("());\n");
					}
					from.append("\t\to.set");
					from.append(classField);
					from.append("(DaoSerializer.get");
					if (type.getName().equals("int")) {
						from.append("Integer");
					}
					else if (type.equals(Date.class)) {
						from.append("Date");
					}
					else if (type.equals(String.class)) {
						from.append("String");
					}
					else if (type.equals(BigDecimal.class)) {
						from.append("BigDecimal");
					}
					else if (type.equals(byte[].class)) {
						from.append("ByteArray");
					}
					else if (type.isEnum()) {
						from.append("Enum");
					}
					else if (customSerializer) {
						if (Collection.class.isAssignableFrom(type))
							from.append("List");
						else
							from.append("Object");
					}
					else {
						// Fall back to the capitalized simple type name (e.g. getLong, getDouble).
						from.append(type.getSimpleName().substring(0, 1).toUpperCase());
						from.append(type.getSimpleName().substring(1));
					}
					from.append("(_d, \"");
					from.append(databaseField);
					from.append("\"");
					if (type.equals(Date.class) && (dateFormat != null))
						from.append(", FORMAT");
					else if (Collection.class.isAssignableFrom(type)) {
						from.append(", ");
						from.append(collType.getSimpleName());
						from.append(".class");
					}
					else if (type.isEnum() || customSerializer) {
						from.append(", ");
						from.append(type.getSimpleName());
						from.append(".class");
					}
					from.append("));\n");
				}
			}
			serializer.append("\t\treturn d;\n\t}\n\n");
			serializer.append(from.toString());
			serializer.append("\t\treturn o;\n\t}\n}");
			FileOutputStream f = null;
			try {
				// autogen=false suppresses the file write but still reports the result.
				if ((dbSerializable == null) || dbSerializable.autogen()) {
					new File(_outputPath).mkdirs();
					f = new FileOutputStream(_outputPath + clazz.getSimpleName() + "Serializer.java");
					f.write(NullUtils.toByteArray(serializer.toString()));
				}
				return new SerializerGenerationResult(packagePath + "." + clazz.getSimpleName() + "Serializer", customSerializerFields);
			}
			catch (Exception e) {
				LOG.error("Failed to write serializer", e);
				return null;
			}
			finally {
				IOUtils.closeQuietly(f);
			}
		}
		catch (ClassNotFoundException e) {
			// BUGFIX: was silently swallowed; the caller only sees a null result, so log the cause.
			LOG.error("Failed to load class for serializer generation: " + _className, e);
			return null;
		}
	}
}

View File

@@ -0,0 +1,91 @@
package com.lanternsoftware.util.dao.generator;
import java.lang.reflect.Field;
import java.util.Date;
import java.util.List;
import java.util.Map;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.dao.AbstractDaoSerializer;
import com.lanternsoftware.util.dao.AnnotationFinder;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.annotations.DBClob;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
import com.lanternsoftware.util.dao.annotations.PrimaryKey;
import com.lanternsoftware.util.dao.annotations.TimestampDates;
/**
 * Emits CREATE TABLE statements for every {@link DBSerializable} class found under a
 * source tree.
 * NOTE(review): column types use NUMBER/CLOB/VARCHAR syntax (Oracle-style) — confirm the
 * intended target dialect.
 */
public class SchemaGenerator {
	/**
	 * Scans {@code _sourceCodeFolder} for classes annotated with {@code @DBSerializable}
	 * and prints a CREATE TABLE statement for each to stdout.
	 */
	public static void generateSchema(String _sourceCodeFolder) {
		Map<String, String> classes = AnnotationFinder.findAnnotatedClasses(_sourceCodeFolder, DBSerializable.class);
		for (String className : classes.keySet()) {
			try {
				Class<?> clazz = Class.forName(className);
				if ((clazz == null) || !DaoSerializer.isAnnotationPresent(clazz, DBSerializable.class))
					continue;
				System.out.println(generateTableCreateStatement(clazz));
			}
			catch (ClassNotFoundException _e) {
				// Class named in source but not on the classpath; skipped deliberately.
			}
		}
	}

	/**
	 * Builds the CREATE TABLE statement for a single entity class.
	 * Fields whose types have no column mapping below are silently skipped.  A single
	 * {@code @PrimaryKey} field becomes an inline PRIMARY KEY; multiple key fields
	 * become a composite named constraint.
	 */
	public static String generateTableCreateStatement(Class<?> _entity) {
		StringBuilder sql = new StringBuilder("CREATE TABLE ");
		// @TimestampDates switches Date columns from epoch-millis NUMBER to TIMESTAMP.
		boolean timestampDates = _entity.getAnnotation(TimestampDates.class) != null;
		String tableName = DaoSerializer.getTableName(_entity);
		sql.append(tableName);
		sql.append(" (");
		boolean bFirst = true;
		List<String> keys = DaoSerializer.getFieldsByAnnotation(_entity, PrimaryKey.class);
		for (Field f : DaoSerializer.getSerializableFields(_entity)) {
			String name = AbstractDaoSerializer.fieldToDatabaseName(f);
			if (name == null)
				continue;
			StringBuilder col = new StringBuilder(name);
			col.append(" ");
			if (NullUtils.isOneOf(f.getType(), Byte.TYPE, byte.class))
				col.append("NUMBER(3,0)");
			else if (NullUtils.isOneOf(f.getType(), Short.TYPE, Short.class))
				col.append("NUMBER(5,0)");
			else if (NullUtils.isOneOf(f.getType(), Integer.TYPE, Integer.class))
				col.append("NUMBER(10,0)");
			else if (NullUtils.isOneOf(f.getType(), Long.TYPE, Long.class))
				col.append("NUMBER(19,0)");
			else if (NullUtils.isOneOf(f.getType(), Double.TYPE, Double.class, Float.TYPE, Float.class))
				col.append("NUMBER(19,4)");
			else if (NullUtils.isOneOf(f.getType(), Boolean.TYPE, Boolean.class))
				col.append("NUMBER(1,0)");
			else if (f.getType().equals(String.class) || f.getType().isEnum()) {
				// Enums are persisted by name; @DBClob lifts the 255-char bound.
				if (f.getAnnotation(DBClob.class) != null)
					col.append("CLOB");
				else
					col.append("VARCHAR(255)");
			}
			else if (f.getType().equals(Date.class)) {
				if (timestampDates)
					col.append("TIMESTAMP");
				else
					col.append("NUMBER(19,0)");
			}
			else
				continue;
			if ((f.getAnnotation(PrimaryKey.class) != null) && (keys.size() == 1))
				col.append(" PRIMARY KEY");
			if (!bFirst)
				sql.append(",");
			else
				bFirst = false;
			sql.append(col);
		}
		if (keys.size() > 1) {
			// Composite key: emit a named table-level constraint instead of inline markers.
			sql.append(", CONSTRAINT ");
			sql.append(tableName);
			sql.append("_pk PRIMARY KEY (");
			sql.append(CollectionUtils.commaSeparated(keys));
			sql.append(")");
		}
		sql.append(");");
		return sql.toString();
	}
}

View File

@@ -0,0 +1,21 @@
package com.lanternsoftware.util.dao.generator;
import java.util.Set;
/**
 * Immutable value object returned by the serializer generators: the fully qualified
 * name of the generated serializer class, plus any field types encountered during
 * generation that still require their own custom serializers.
 */
public class SerializerGenerationResult {
	private final String className;
	private final Set<Class<?>> fieldsNeedingCustomSerializers;

	public SerializerGenerationResult(String _className, Set<Class<?>> _fieldsNeedingCustomSerializers) {
		className = _className;
		fieldsNeedingCustomSerializers = _fieldsNeedingCustomSerializers;
	}

	/** @return the fully qualified class name of the generated serializer */
	public String getClassName() {
		return className;
	}

	/** @return field types that require custom serializers of their own */
	public Set<Class<?>> getFieldsNeedingCustomSerializers() {
		return fieldsNeedingCustomSerializers;
	}
}

View File

@@ -0,0 +1,200 @@
package com.lanternsoftware.util.dao.generator;
import java.io.File;
import java.io.FileOutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.lanternsoftware.util.dao.DaoProxyType;
import com.lanternsoftware.util.dao.annotations.PrimaryKey;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.CollectionUtils;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.dao.AbstractDaoSerializer;
import com.lanternsoftware.util.dao.AnnotationFinder;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.annotations.CaseFormat;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
public class SwiftModelGenerator {
private static final Logger LOG = LoggerFactory.getLogger(SwiftModelGenerator.class);
public static void generateModel(String _codePath, String _outputPath) {
for (Entry<String, String> e : AnnotationFinder.findAnnotatedClasses(_codePath, DBSerializable.class).entrySet()) {
generateSerializer(e.getKey(), e.getValue().replace(_codePath, _outputPath) + File.separator + "bson" + File.separator);
}
}
private static SerializerGenerationResult generateSerializer(String _className, String _outputPath) {
try {
Set<Class<?>> customSerializerFields = new HashSet<>();
Class<?> clazz = Class.forName(_className);
DBSerializable dbSerializable = DaoSerializer.getAnnotation(clazz, DBSerializable.class);
if (dbSerializable == null)
return null;
CaseFormat caseFormat = dbSerializable.caseFormat();
StringBuilder bson = new StringBuilder();
bson.append("import Foundation\nimport BSON\n\nclass ");
bson.append(clazz.getSimpleName());
bson.append(":LanternObject {\n");
List<Field> fields = DaoSerializer.getSerializableFields(clazz, true);
Map<String, List<Field>> mapFields = new HashMap<>();
for (Field f : fields) {
if (Modifier.isStatic(f.getModifiers()) || Modifier.isTransient(f.getModifiers()))
continue;
for (Annotation a : f.getAnnotations()) {
CollectionUtils.addToMultiMap(a.annotationType().getCanonicalName(), f, mapFields);
}
}
String primaryKeyName = null;
Field primaryKey = CollectionUtils.getFirst(mapFields.get(PrimaryKey.class.getCanonicalName()));
if (primaryKey != null) {
primaryKeyName = AbstractDaoSerializer.fieldToDatabaseName(primaryKey, caseFormat);
}
for (Field f : fields) {
bson.append("\tvar ");
bson.append(f.getName());
bson.append(":");
bson.append(typeToSwift(f));
bson.append("?\n");
}
bson.append("\n\tinit() {}\n\n\trequired init(bson: Document) {\n");
for (Field f : fields) {
bson.append("\t\tself.");
bson.append(f.getName());
String databaseName = AbstractDaoSerializer.fieldToDatabaseName(f, caseFormat);
if (NullUtils.isEqual(databaseName, primaryKeyName))
databaseName = "_id";
if (AbstractDaoSerializer.getCollectionType(f) != null) {
bson.append(" = BsonUtils.getList(bson:bson, field:\"");
bson.append(databaseName);
bson.append("\")\n");
}
else if (AbstractDaoSerializer.requiresCustomSerializer(f)) {
bson.append(" = BsonUtils.getObject(bson:bson, field:\"");
bson.append(databaseName);
bson.append("\")\n");
}
else {
bson.append(" = bson[\"");
bson.append(databaseName);
bson.append("\"] as? ");
bson.append(typeToSwift(f));
bson.append("\n");
}
}
bson.append("\t}\n\n\tfunc toBSON()->Document {\n\t\tlet bson: Document = [");
boolean first = true;
for (Field f : fields) {
if (!first)
bson.append(",");
else
first = false;
String databaseName = AbstractDaoSerializer.fieldToDatabaseName(f, caseFormat);
if (NullUtils.isEqual(databaseName, primaryKeyName))
databaseName = "_id";
bson.append("\n\t\t\t\"");
bson.append(databaseName);
bson.append("\": ");
if (AbstractDaoSerializer.getCollectionType(f) != null) {
bson.append("BsonUtils.toDocument(coll:self.");
bson.append(f.getName());
bson.append(")");
}
else if (AbstractDaoSerializer.requiresCustomSerializer(f)) {
bson.append("BsonUtils.toDocument(obj:self.");
bson.append(f.getName());
bson.append(")");
}
else {
bson.append("self.");
bson.append(f.getName());
}
}
bson.append("\n\t\t]\n\t\treturn bson\n\t}\n}");
FileOutputStream f = null;
try {
// if (dbSerializable.autogen()) {
new File(_outputPath).mkdirs();
f = new FileOutputStream(_outputPath + clazz.getSimpleName() + ".swift");
f.write(NullUtils.toByteArray(bson.toString()));
// }
return new SerializerGenerationResult(clazz.getSimpleName() + ".swift", customSerializerFields);
}
catch (Exception e) {
e.printStackTrace();
return null;
}
finally {
IOUtils.closeQuietly(f);
}
}
catch (ClassNotFoundException e) {
return null;
}
}
/**
 * Resolves the Swift type declaration for a Java field, wrapping collection
 * element types in Swift array syntax (e.g. {@code [Int32]}).
 *
 * @param _f the reflected Java field to translate
 * @return the Swift type name for the field
 */
private static String typeToSwift(Field _f) {
	Class<?> fieldType = _f.getType();
	boolean isCollection = Collection.class.isAssignableFrom(fieldType);
	return isCollection
			? "[" + typeToSwift(AbstractDaoSerializer.getCollectionType(_f)) + "]"
			: typeToSwift(fieldType);
}
/**
 * Maps a Java type to its Swift equivalent for the generated serializer code.
 * Enums serialize as their name, so they map to {@code String}; unrecognized
 * types fall back to the Java simple class name, on the assumption that a
 * Swift class of the same name is generated alongside this one.
 *
 * @param _class the Java class to translate
 * @return the Swift type name
 */
private static String typeToSwift(Class<?> _class) {
	if (NullUtils.isOneOf(_class, Integer.TYPE, Integer.class))
		return "Int32";
	if (NullUtils.isOneOf(_class, Long.TYPE, Long.class))
		return "Int64";
	if (NullUtils.isOneOf(_class, Short.TYPE, Short.class))
		return "Int16";
	if (NullUtils.isOneOf(_class, Byte.TYPE, Byte.class))
		return "Int8";
	if (NullUtils.isOneOf(_class, Double.TYPE, Double.class))
		return "Double";
	if (NullUtils.isOneOf(_class, Float.TYPE, Float.class))
		return "Float";
	if (NullUtils.isOneOf(_class, Boolean.TYPE, Boolean.class))
		return "Bool";
	if (_class.equals(Date.class))
		return "Date";
	if (_class.equals(String.class))
		return "String";
	// Bug fix: Swift's unsigned byte type is UInt8 (capital I); "Uint8" does not compile in Swift.
	if (_class.equals(byte[].class))
		return "[UInt8]";
	// Enums are serialized by name, so they map to a Swift String.
	if (_class.isEnum())
		return "String";
	return _class.getSimpleName();
}
/**
 * Recursively determines whether a class implements an interface with the given
 * simple name, either directly, through a super-interface, or through any
 * superclass below {@code Object}.
 *
 * @param _class         the class to inspect (may be null)
 * @param _interfaceName the simple (unqualified) name of the interface
 * @return true if the interface is found anywhere in the type hierarchy
 */
private static boolean doesImplement(Class<?> _class, String _interfaceName) {
	if ((_class == null) || _class.equals(Object.class))
		return false;
	for (Class<?> candidate : _class.getInterfaces()) {
		boolean matches = candidate.getSimpleName().equals(_interfaceName) || doesImplement(candidate, _interfaceName);
		if (matches)
			return true;
	}
	// Not found on this level; walk up the superclass chain.
	return doesImplement(_class.getSuperclass(), _interfaceName);
}
}

View File

@@ -0,0 +1,79 @@
package com.lanternsoftware.util.dao.jdbc;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import javax.naming.InitialContext;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.lanternsoftware.util.NullUtils;
public class DataSourceProxy extends AbstractJdbcProxy {
	private static final Logger LOG = LoggerFactory.getLogger(DataSourceProxy.class);
	private DataSource dataSource;
	private String schemaName;

	/**
	 * Looks up the data source via JNDI and pins every connection handed out to the given schema.
	 *
	 * @param _jndiDataSourceName the JNDI name of the data source
	 * @param _schemaName         the schema to set on each connection
	 */
	public DataSourceProxy(String _jndiDataSourceName, String _schemaName) {
		this(_jndiDataSourceName);
		schemaName = _schemaName;
	}

	/**
	 * Looks up the data source via JNDI, trying the name directly, then under the
	 * standard container prefix {@code java:/comp/env/}.
	 *
	 * @param _jndiDataSourceName the JNDI name of the data source
	 */
	public DataSourceProxy(String _jndiDataSourceName) {
		try {
			dataSource = (DataSource) new InitialContext().lookup(_jndiDataSourceName);
		}
		catch (Exception e) {
			try {
				dataSource = (DataSource) new InitialContext().lookup("java:/comp/env/" + _jndiDataSourceName);
			}
			catch (Exception _e) {
				// Log the original failure; the prefixed retry failed too.
				LOG.error("Error looking up " + _jndiDataSourceName, e);
			}
		}
	}

	/**
	 * Wraps an existing data source and detects the database type from connection metadata.
	 *
	 * @param _dataSource the data source to delegate to
	 */
	public DataSourceProxy(DataSource _dataSource) {
		dataSource = _dataSource;
		// try-with-resources returns the probe connection to the pool (the previous
		// implementation leaked it) and the null check guards against getConnection()
		// returning null on failure.
		try (Connection conn = getConnection()) {
			if (conn != null) {
				DatabaseMetaData metaData = conn.getMetaData();
				if (metaData.getDatabaseProductName().equals("Oracle") && (metaData.getDatabaseMajorVersion() >= 12))
					databaseType = DatabaseType.ORACLE_12C;
			}
		}
		catch (SQLException _e) {
			LOG.error("Could not get database type", _e);
		}
	}

	/**
	 * @return a connection from the underlying data source with the configured schema
	 *         applied, or null if one could not be obtained
	 */
	@Override
	public Connection getConnection() {
		try {
			Connection conn = dataSource.getConnection();
			if (NullUtils.isNotEmpty(schemaName) && (conn != null))
				conn.setSchema(schemaName);
			return conn;
		}
		catch (SQLException _e) {
			LOG.error("Failed to get a jdbc connection", _e);
			return null;
		}
	}

	/**
	 * @return true if a connection can be obtained and validates within 10 seconds
	 */
	@Override
	public boolean isConnected() {
		// Close the probe connection after the check and guard against a null connection
		// (the previous implementation leaked the connection and could throw NPE).
		try (Connection conn = getConnection()) {
			return (conn != null) && conn.isValid(10);
		}
		catch (Exception _e) {
			LOG.error("Failed to get a jdbc connection", _e);
			return false;
		}
	}

	@Override
	public boolean alwaysClose() {
		// Pooled connections should be returned (closed) after every operation.
		return true;
	}
}

View File

@@ -0,0 +1,8 @@
package com.lanternsoftware.util.dao.jdbc;
/**
 * Enumeration of the database engines supported by the JDBC proxy layer.
 * Used to select the JDBC driver class and the connection string format.
 */
public enum DatabaseType {
	// Oracle versions prior to 12c.
	ORACLE_11G,
	// Oracle 12c or newer (detected from connection metadata when major version >= 12).
	ORACLE_12C,
	// MySQL (Connector/J driver).
	MYSQL,
	// InterSystems Cache.
	CACHE
}

View File

@@ -0,0 +1,91 @@
package com.lanternsoftware.util.dao.jdbc;
import com.lanternsoftware.util.NullUtils;
import com.lanternsoftware.util.dao.annotations.DBSerializable;
@DBSerializable
public class JdbcConfig {
private DatabaseType type;
private String username;
private String password;
private String hostname;
private String database;
private String port;
public JdbcConfig() {
}
public JdbcConfig(DatabaseType _type, String _username, String _password, String _hostname, String _database, String _port) {
type = _type;
username = _username;
password = _password;
hostname = _hostname;
database = _database;
port = _port;
}
public DatabaseType getType() {
return type;
}
public void setType(DatabaseType _type) {
type = _type;
}
public String getUsername() {
return username;
}
public void setUsername(String _username) {
username = _username;
}
public String getPassword() {
return password;
}
public void setPassword(String _password) {
password = _password;
}
public String getHostname() {
return hostname;
}
public void setHostname(String _hostname) {
hostname = _hostname;
}
public String getDatabase() {
return database;
}
public void setDatabase(String _database) {
database = _database;
}
public String getPort() {
return port;
}
public void setPort(String _port) {
port = _port;
}
public String getConnectionString() {
StringBuilder conn = new StringBuilder("jdbc:");
if (type == DatabaseType.MYSQL)
conn.append("mysql");
else
conn.append("oracle:thin");
conn.append("://");
conn.append(hostname);
conn.append(":");
conn.append(port);
if (NullUtils.isNotEmpty(database)) {
conn.append("/");
conn.append(database);
}
return conn.toString();
}
}

View File

@@ -0,0 +1,62 @@
package com.lanternsoftware.util.dao.jdbc;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * DAO proxy that wraps a single long-lived JDBC {@link Connection}, with static
 * factories that load the appropriate driver for a {@link DatabaseType}.
 */
public class JdbcProxy extends AbstractJdbcProxy {
	private static final Logger LOG = LoggerFactory.getLogger(JdbcProxy.class);
	private Connection connection;

	/**
	 * @param _connection the connection this proxy delegates to
	 */
	public JdbcProxy(Connection _connection) {
		connection = _connection;
	}

	@Override
	public Connection getConnection() {
		return connection;
	}

	/**
	 * @return true if the wrapped connection validates within 10 seconds
	 */
	@Override
	public boolean isConnected() {
		try {
			return connection.isValid(10);
		}
		catch (Exception _e) {
			LOG.error("Failed to get a jdbc connection", _e);
			return false;
		}
	}

	@Override
	public boolean alwaysClose() {
		// This proxy owns one long-lived connection; it must not be closed after each operation.
		return false;
	}

	/**
	 * Creates a proxy from a {@link JdbcConfig}, deriving the URL from the config.
	 *
	 * @return the proxy, or null on failure
	 */
	public static JdbcProxy getProxy(JdbcConfig _config) {
		return getProxy(_config.getType(), _config.getConnectionString(), _config.getUsername(), _config.getPassword());
	}

	/**
	 * Loads and registers the JDBC driver for the given database type, then opens a connection.
	 *
	 * @param _type             the database type (selects the driver class; defaults to Oracle)
	 * @param _connectionString the JDBC URL
	 * @param _username         the database user
	 * @param _password         the database password
	 * @return the proxy, or null if the driver could not be loaded or the connection failed
	 */
	public static JdbcProxy getProxy(DatabaseType _type, String _connectionString, String _username, String _password) {
		String driver;
		if (_type == DatabaseType.MYSQL)
			driver = "com.mysql.cj.jdbc.Driver";
		else if (_type == DatabaseType.CACHE)
			driver = "com.intersys.jdbc.CacheDriver";
		else
			driver = "oracle.jdbc.driver.OracleDriver";
		try {
			// getDeclaredConstructor().newInstance() replaces the deprecated
			// Class.newInstance(), which rethrows checked constructor exceptions unchecked.
			DriverManager.registerDriver(Class.forName(driver).asSubclass(Driver.class).getDeclaredConstructor().newInstance());
			JdbcProxy proxy = new JdbcProxy(DriverManager.getConnection(_connectionString, _username, _password));
			proxy.databaseType = _type;
			return proxy;
		}
		catch (Exception _e) {
			LOG.error("Failed to load JDBC driver for database type: " + _type, _e);
			return null;
		}
	}
}

View File

@@ -0,0 +1,7 @@
package com.lanternsoftware.util.dao.jdbc;
/**
 * Convenience factory for obtaining a {@link JdbcProxy} against an Oracle 11g test database.
 */
public abstract class OracleTestProxy {
	/**
	 * Creates a proxy using the {@link DatabaseType#ORACLE_11G} database type.
	 *
	 * @param _connectionString the JDBC URL
	 * @param _username         the database user
	 * @param _password         the database password
	 * @return the proxy, or null on failure
	 */
	public static JdbcProxy getProxy(String _connectionString, String _username, String _password) {
		DatabaseType oracleType = DatabaseType.ORACLE_11G;
		return JdbcProxy.getProxy(oracleType, _connectionString, _username, _password);
	}
}

View File

@@ -0,0 +1,51 @@
package com.lanternsoftware.util.dao.jdbc.dao;
import com.lanternsoftware.util.dao.AbstractDaoSerializer;
import com.lanternsoftware.util.dao.DaoEntity;
import com.lanternsoftware.util.dao.DaoProxyType;
import com.lanternsoftware.util.dao.DaoSerializer;
import com.lanternsoftware.util.dao.jdbc.DatabaseType;
import com.lanternsoftware.util.dao.jdbc.JdbcConfig;
import java.util.Collections;
import java.util.List;
/**
 * DAO serializer that converts {@link JdbcConfig} instances to and from
 * {@link DaoEntity} documents for MongoDB persistence.
 */
public class JdbcConfigSerializer extends AbstractDaoSerializer<JdbcConfig> {
	@Override
	public Class<JdbcConfig> getSupportedClass() {
		return JdbcConfig.class;
	}

	@Override
	public List<DaoProxyType> getSupportedProxies() {
		return Collections.singletonList(DaoProxyType.MONGO);
	}

	@Override
	public DaoEntity toDaoEntity(JdbcConfig _o) {
		DaoEntity entity = new DaoEntity();
		entity.put("type", DaoSerializer.toEnumName(_o.getType()));
		entity.put("username", _o.getUsername());
		entity.put("password", _o.getPassword());
		entity.put("hostname", _o.getHostname());
		entity.put("database", _o.getDatabase());
		entity.put("port", _o.getPort());
		return entity;
	}

	@Override
	public JdbcConfig fromDaoEntity(DaoEntity _d) {
		JdbcConfig config = new JdbcConfig();
		config.setType(DaoSerializer.getEnum(_d, "type", DatabaseType.class));
		config.setUsername(DaoSerializer.getString(_d, "username"));
		config.setPassword(DaoSerializer.getString(_d, "password"));
		config.setHostname(DaoSerializer.getString(_d, "hostname"));
		config.setDatabase(DaoSerializer.getString(_d, "database"));
		config.setPort(DaoSerializer.getString(_d, "port"));
		return config;
	}
}

View File

@@ -0,0 +1,89 @@
package com.lanternsoftware.util.dao.jdbc.preparedinstatement;
import java.util.Collection;
import java.util.Iterator;
import java.util.TreeSet;
/**
 * Utility class that houses the logic to determine bucket sizes for batched in-clauses.
 * A "bucket" is one of a small set of allowed batch sizes; rounding batch counts up to
 * a bucket keeps the number of distinct generated SQL statements small.
 */
class BatchBucket {
	private static final int MAX_BATCH_SIZE = 1000;
	private static final Collection<Integer> predefinedBucketSizes = new TreeSet<Integer>();
	static {
		for (int size : new int[] {1, 10, 25, 50, 100, 500, MAX_BATCH_SIZE})
			predefinedBucketSizes.add(size);
	}
	private final Collection<Integer> bucketSizes = new TreeSet<Integer>();
	private int maxBatchSize;

	/**
	 * Default Constructor. The default maximum batch size is currently 1000.
	 */
	public BatchBucket() {
		this(MAX_BATCH_SIZE);
	}

	/**
	 * Batch-size Constructor.
	 *
	 * @param _nMaxBatchSize the maximum batch size; values outside the range (0, 1000]
	 *                       fall back to the default maximum of 1000.
	 */
	public BatchBucket(int _nMaxBatchSize) {
		boolean valid = (_nMaxBatchSize > 0) && (_nMaxBatchSize <= MAX_BATCH_SIZE);
		if (!valid) {
			bucketSizes.addAll(predefinedBucketSizes);
			maxBatchSize = MAX_BATCH_SIZE;
			return;
		}
		// Keep every predefined bucket below the cap, then add the cap itself as the top bucket.
		for (int predefined : predefinedBucketSizes) {
			if (predefined < _nMaxBatchSize)
				bucketSizes.add(predefined);
		}
		bucketSizes.add(_nMaxBatchSize);
		maxBatchSize = _nMaxBatchSize;
	}

	/**
	 * @return the calculated maximum batch size
	 */
	public int getMaxBatchSize() {
		return maxBatchSize;
	}

	/**
	 * Rounds a statement size up to the smallest bucket that can hold it.
	 *
	 * @param _nCurSize the current size of the statement
	 * @return the bucket size, or 0 if the size is non-positive or exceeds every bucket
	 */
	public int getBatchSize(int _nCurSize) {
		if (_nCurSize <= 0)
			return 0;
		// TreeSet iterates in ascending order, so the first bucket that fits is the smallest.
		for (int bucket : bucketSizes) {
			if (_nCurSize <= bucket)
				return bucket;
		}
		return 0;
	}
}

View File

@@ -0,0 +1,148 @@
package com.lanternsoftware.util.dao.jdbc.preparedinstatement;
import java.sql.PreparedStatement;
import java.util.Iterator;
import java.util.LinkedList;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedParameter;
/**
 * Class that represents a single in-clause statement (all columns - {@link InClauseColumn}) Example: Batch Size: 2
 * select * from table where (record_id, str_val) in (('1','2'),('3','4')) InClause is an abstract representation for
 * all of the pieces of information: InClauseColumn Column Display: record_id Parameters: '1', '3' InClauseColumn Column
 * Display: str_val Parameters: '2', '4'
 */
class InClause {
	// Columns participating in the in-clause; one parameter per column forms a "row" of the clause.
	private final InClauseColumn[] columns;
	// Bucket-size calculator; replaced whenever setMaxBatchSize is called.
	private BatchBucket batchBucket;
	// Index of this in-clause within the SQL statement.
	private final int startIdx;
	// If true, an empty clause renders as '1=1' (all rows); if false, '1 = 0' (no rows).
	private final boolean returnAllIfEmpty;
	/**
	 * Default Constructor
	 *
	 * @param _startIdx
	 *          - Integer representing the index of the in-clause inside the SQL statement
	 * @param _returnAllIfEmpty
	 *          - a boolean flag that determines the resulting behavior if the {@link InClauseColumn}s are empty. If true,
	 *          the in-clause will be replaced with a '1=1', meaning all rows would be returned; false will replace with
	 *          '1 = 0'.
	 * @param _columns
	 *          - a variable list of {@link InClauseColumn}s that make up the in-clause
	 */
	public InClause(int _startIdx, boolean _returnAllIfEmpty, InClauseColumn... _columns) {
		columns = _columns;
		startIdx = _startIdx;
		returnAllIfEmpty = _returnAllIfEmpty;
		// 0 is out of BatchBucket's valid range, so this defaults to the maximum batch size (1000).
		batchBucket = new BatchBucket(0);
	}
	/**
	 * Method which defines a maximum batch size used to calculate in-clause batch sizes.
	 *
	 * @param _batchSize
	 *          - the maximum batch size; out-of-range values fall back to the default maximum
	 */
	public void setMaxBatchSize(int _batchSize) {
		batchBucket = new BatchBucket(_batchSize);
	}
	/**
	 * Method to take all of the {@link InClauseColumn}s that make up the in-clause and turned those into a list of
	 * {@link InClauseBatchedParameter}s. When executing an in-clause, some databases restrict the number of items that may
	 * be in a specified in-clause (currently the limit is 1000). This item-limit includes items for all columns (i.e. if
	 * you have two columns, each column may only contribute half of the total - i.e. 500 items). Therefore, we need to
	 * build a list of batched parameters that do not exceed the maximum item threshold, but at the same time is batched
	 * appropriately (to minimize the number of {@link PreparedStatement}s that will be generated. When optimizing batch
	 * sizes, there are a few special cases that are noted below. <strong>Special Case 1:</strong> The batch-size is less
	 * than the number of {@link InClauseColumn}s In this situation, the batch size will be adjusted to the number of
	 * columns. For example, if you set the batch size to 1 and you pass in 2 columns, your optimized batch size will be 2.
	 * <strong>Special Case 2:</strong> No {@link PreparedParameter}s are set on any {@link InClauseColumn}s In this
	 * situation, the behavior will depend on the ReturnAllIfEmpty flag passed in during the constructor. If true, the
	 * in-clause will be replaced with a '1=1', meaning all rows would be returned; false will replace with '1 = 0'.
	 * <strong>Special Case 3:</strong> A single {@link InClauseColumn} with a single {@link PreparedParameter}s is set. In
	 * this situation, the in-clause will be optimized to be an equals (i.e. = ? and not in (?)).
	 *
	 * @return a {@link LinkedList} of optimum-sized batched {@link PreparedParameter}s
	 */
	public LinkedList<InClauseBatchedParameter> getBatchedParameters() {
		LinkedList<InClauseBatchedParameter> listReturnParameters = new LinkedList<InClauseBatchedParameter>();
		if (columns == null || columns.length == 0)
			return listReturnParameters;
		int nColumnCnt = columns.length;
		int nCurrentSize = 0;
		InClauseBatchedParameter batchedParameters = new InClauseBatchedParameter(columns);
		listReturnParameters.add(batchedParameters);
		// special case 2: if there are no parameters, we will honor the behavior of returnAllIfEmpty
		// (only the first column is checked - presumably all columns carry the same parameter count)
		if (nColumnCnt >= 1 && columns[0].getParameterCnt() == 0) {
			batchedParameters.setReturnAllIfEmpty(returnAllIfEmpty);
			return listReturnParameters;
		}
		// special case 3: if there is only 1 column and 1 element in the column, we can use = , not in()
		if (nColumnCnt == 1 && columns[0].getParameterCnt() == 1) {
			batchedParameters.addParameter(columns[0].getNextParameter());
			return listReturnParameters;
		}
		InClauseBatchedParameter lastBatchedParameters = null;
		boolean bMoreParameters = true;
		// Each pass consumes one parameter from every column, i.e. one row of the in-clause.
		while (bMoreParameters) {
			// Start a new batch once adding another full row would exceed the maximum batch size.
			if (/* special case 1 */nCurrentSize > 0 && (nCurrentSize + nColumnCnt) > batchBucket.getMaxBatchSize()) {
				batchedParameters = new InClauseBatchedParameter(columns);
				listReturnParameters.add(batchedParameters);
				nCurrentSize = 0;
			}
			// Track the most recent row separately; it is reused below to pad the final batch.
			lastBatchedParameters = new InClauseBatchedParameter(columns);
			for (InClauseColumn column : columns) {
				if (column == null)
					continue;
				PreparedParameter parameter = column.getNextParameter();
				batchedParameters.addParameter(parameter);
				lastBatchedParameters.addParameter(parameter);
				nCurrentSize++;
				if (!column.hasNextParameter())
					bMoreParameters = false;
			}
			// NOTE(review): if every element of 'columns' is null, this loop body consumes nothing
			// and never terminates - confirm callers never pass an all-null column array.
		}
		// optimize the size of the remaining bucket
		// Presumably pads the final batch up to a predefined bucket size by repeating the last
		// row's parameters (duplicate values in an SQL in-clause do not change the result set),
		// so fewer distinct prepared statements are generated - TODO confirm.
		if (lastBatchedParameters != null) {
			int nBatchSize = batchBucket.getBatchSize(nCurrentSize);
			while (nCurrentSize < nBatchSize) {
				// if we are going to add more parameters than the bucket size, we need to break now
				if (nCurrentSize + nColumnCnt > nBatchSize)
					break;
				Iterator<PreparedParameter> iter = lastBatchedParameters.getParameters().iterator();
				while (iter.hasNext()) {
					batchedParameters.addParameter(iter.next());
					++nCurrentSize;
				}
			}
		}
		return listReturnParameters;
	}
	/**
	 * @return Integer representing the starting index in the SQL statement
	 */
	public int getStartIndex() {
		return startIdx;
	}
	/**
	 * Method to reset the clause after an evaluation, rewinding every column's parameter iterator
	 */
	public void reset() {
		for (InClauseColumn column : columns) {
			if (column != null)
				column.reset();
		}
	}
}

View File

@@ -0,0 +1,90 @@
package com.lanternsoftware.util.dao.jdbc.preparedinstatement;
import java.sql.PreparedStatement;
import java.util.LinkedList;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedParameter;
/**
 * Holds one batch of {@link PreparedParameter}s destined for a single {@link PreparedStatement}
 * in-clause. For the statement {@code select * from table where (record_id, str_val) in
 * (('1','2'),('3','4'))} with a batch size of 2, one instance carries the parameters
 * '1', '2', '3', '4'. With a batch size of 1, the same data splits across two instances:
 * ('1', '3') and ('2', '4').
 */
public class InClauseBatchedParameter {
	private final LinkedList<PreparedParameter> parameters = new LinkedList<PreparedParameter>();
	private final InClauseColumn[] columns;
	private int size = 0;
	private boolean returnAllIfEmpty = false;

	/**
	 * @param _arrColumns the {@link InClauseColumn}s that make up the in-clause
	 */
	public InClauseBatchedParameter(InClauseColumn[] _arrColumns) {
		columns = _arrColumns;
	}

	/**
	 * Appends a parameter to this batch.
	 *
	 * @param _parameter the {@link PreparedParameter} to add
	 */
	public void addParameter(PreparedParameter _parameter) {
		parameters.add(_parameter);
		size = size + 1;
	}

	/**
	 * @return the number of {@link PreparedParameter}s added through {@link #addParameter}
	 */
	public int getBatchParameterCnt() {
		return size;
	}

	/**
	 * @return the number of {@link InClauseColumn}s spanned by this batch
	 */
	public int getColumnCnt() {
		return columns.length;
	}

	/**
	 * @return the columns that make up the in-clause
	 */
	public InClauseColumn[] getColumns() {
		return columns;
	}

	/**
	 * @return the live list of batched {@link PreparedParameter}s
	 */
	public LinkedList<PreparedParameter> getParameters() {
		return parameters;
	}

	/**
	 * @param _bReturnAllIfEmpty if true, an empty in-clause renders as '1=1' (all rows
	 *                           returned); if false, it renders as '1 = 0' (no rows)
	 */
	public void setReturnAllIfEmpty(boolean _bReturnAllIfEmpty) {
		returnAllIfEmpty = _bReturnAllIfEmpty;
	}

	/**
	 * @return the empty-clause behavior flag; see {@link #setReturnAllIfEmpty(boolean)}
	 */
	public boolean isReturnAllIfEmpty() {
		return returnAllIfEmpty;
	}
}

View File

@@ -0,0 +1,148 @@
package com.lanternsoftware.util.dao.jdbc.preparedinstatement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Collects the {@link InClause}s of a single SQL statement and expands them into the full set
 * of {@link InClauseStatement}s - the cartesian product of every clause's batched parameter
 * lists - needed to execute the query within database in-clause size limits.
 */
class InClauseBuilder {
	// In-clauses keyed by their start index within the SQL statement.
	private final Map<Integer, InClause> inClauses = new HashMap<Integer, InClause>();
	// Maximum batch size applied to every clause; 0 falls back to the BatchBucket default.
	private int maxBatchSize = 0;
	/**
	 * Registers an in-clause, keyed by its start index (a later clause with the same index replaces it).
	 *
	 * @param _clause
	 *          - {@link InClause} to be processed.
	 */
	public void addClause(InClause _clause) {
		if (_clause != null) {
			inClauses.put(_clause.getStartIndex(), _clause);
		}
	}
	/**
	 * Method which defines a maximum batch size used to calculate in-clause batch sizes.
	 *
	 * @param _batchSize
	 *          - the maximum batch size applied to every registered clause
	 */
	public void setMaxBatchSize(int _batchSize) {
		maxBatchSize = _batchSize;
	}
	/**
	 * Method to generate a Collection of {@link InClauseStatement}s to be executed. These statements represent all possible combinations of {@link InClauseBatchedParameter}s from all {@link InClause}s that will need to be executed in order to fulfill the set in-clauses.
	 *
	 * @return Collection of {@link InClauseStatement}s to be executed. Will not return null, but return an empty Collection
	 */
	public Collection<InClauseStatement> buildStatements() {
		Collection<InClauseStatement> collStatements = new ArrayList<InClauseStatement>();
		if (inClauses.isEmpty()) {
			return collStatements;
		}
		// First batch each clause independently, then combine the per-clause batches below.
		Map<Integer, LinkedList<InClauseBatchedParameter>> mapBatchedParameters = new HashMap<Integer, LinkedList<InClauseBatchedParameter>>();
		Iterator<Entry<Integer, InClause>> iter = inClauses.entrySet().iterator();
		while (iter.hasNext()) {
			Entry<Integer, InClause> entry = iter.next();
			if (entry == null) {
				continue;
			}
			int nStartIdx = entry.getKey();
			InClause inClause = entry.getValue();
			if (inClause == null) {
				continue;
			}
			inClause.setMaxBatchSize(maxBatchSize);
			LinkedList<InClauseBatchedParameter> batchedParameters = inClause.getBatchedParameters();
			mapBatchedParameters.put(nStartIdx, batchedParameters);
		}
		buildInClauseStatements(mapBatchedParameters, collStatements);
		return collStatements;
	}
	// Recursive cartesian-product construction: each call removes ONE clause entry from
	// _batchedParameters (the map is mutated) and crosses its batches with every statement
	// accumulated so far, then recurses until the map is empty.
	private void buildInClauseStatements(Map<Integer, LinkedList<InClauseBatchedParameter>> _batchedParameters, Collection<InClauseStatement> _inStatements) {
		if (_batchedParameters.isEmpty()) {
			return;
		}
		Entry<Integer, LinkedList<InClauseBatchedParameter>> entry = _batchedParameters.entrySet().iterator().next();
		int nStartIdx = entry.getKey();
		_batchedParameters.remove(nStartIdx);
		LinkedList<InClauseBatchedParameter> parameters = entry.getValue();
		if (parameters == null) {
			return;
		}
		if (_inStatements.isEmpty()) {
			// Base case: seed one statement per batch of the first clause.
			for (InClauseBatchedParameter parameter : parameters) {
				if (parameter == null) {
					continue;
				}
				InClauseStatement statement = new InClauseStatement();
				statement.setNextParameter(nStartIdx, parameter);
				_inStatements.add(statement);
			}
		}
		else {
			Collection<InClauseStatement> collNewStatements = new ArrayList<InClauseStatement>();
			Iterator<InClauseStatement> iter = _inStatements.iterator();
			while (iter.hasNext()) {
				InClauseStatement existingStatement = iter.next();
				if (existingStatement == null) {
					continue;
				}
				// we need to clone the statement because we'll add a parameter to the first statement,
				// but we need the original to add other combinations of the parameters
				InClauseStatement clonedStatement = existingStatement.clone();
				boolean bFirst = true;
				for (InClauseBatchedParameter parameter : parameters) {
					if (parameter == null) {
						continue;
					}
					/*
					 * if there's only 1 parameter, we can just add that parameter to the existing statements; for additional
					 * parameters, we need to build all of the remaining combinations of the new parameter with the existing
					 * statements
					 */
					if (bFirst) {
						existingStatement.setNextParameter(nStartIdx, parameter);
						bFirst = false;
						continue;
					}
					else {
						InClauseStatement newStatement = clonedStatement.clone();
						newStatement.setNextParameter(nStartIdx, parameter);
						collNewStatements.add(newStatement);
					}
				}
			}
			_inStatements.addAll(collNewStatements);
		}
		buildInClauseStatements(_batchedParameters, _inStatements);
	}
	/**
	 * Method to reset the builder: rewinds every registered clause, then clears all state
	 */
	public void reset() {
		for (InClause clause : inClauses.values()) {
			if (clause != null) {
				clause.reset();
			}
		}
		inClauses.clear();
		maxBatchSize = 0;
	}
}

View File

@@ -0,0 +1,87 @@
package com.lanternsoftware.util.dao.jdbc.preparedinstatement;
import java.util.Iterator;
import java.util.Queue;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedParameter;
/**
 * Represents one column of an in-clause: its SQL display text plus the queue of values bound
 * to it. For {@code select * from table where (record_id, str_val) in (('1','2'),('3','4'))}
 * there are two instances: one displaying {@code record_id} with parameters '1', '3', and one
 * displaying {@code str_val} with parameters '2', '4'.
 */
public class InClauseColumn {
	private final String sqlColumnDisplay;
	private final Queue<? extends PreparedParameter> parameters;
	private final int size;
	private Iterator<? extends PreparedParameter> currentParam;

	/**
	 * Constructor.
	 *
	 * @param _sqlColumnDisplay the column's display text; may include an alias
	 *                          qualifier (e.g. {@code record_id} or {@code t.record_id})
	 * @param _parameters       the queue of {@link PreparedParameter}s bound to this column
	 */
	public InClauseColumn(String _sqlColumnDisplay, final Queue<? extends PreparedParameter> _parameters) {
		sqlColumnDisplay = _sqlColumnDisplay;
		if (_parameters == null) {
			parameters = null;
			size = 0;
			currentParam = null;
		}
		else {
			parameters = _parameters;
			size = _parameters.size();
			currentParam = _parameters.iterator();
		}
	}

	/**
	 * @return the column's SQL display text
	 */
	public String getColumnDisplay() {
		return sqlColumnDisplay;
	}

	/**
	 * @return true if any {@link PreparedParameter}s remain in the current iteration
	 */
	public boolean hasNextParameter() {
		return (parameters != null) && currentParam.hasNext();
	}

	/**
	 * @return the next {@link PreparedParameter} in the iteration; null if the column has no parameters
	 */
	public PreparedParameter getNextParameter() {
		return (currentParam == null) ? null : currentParam.next();
	}

	/**
	 * @return the size of the parameter queue at construction time (not the remaining count)
	 */
	public int getParameterCnt() {
		return size;
	}

	/**
	 * Rewinds the column so {@link #hasNextParameter()} and {@link #getNextParameter()}
	 * start over from the beginning of the queue.
	 */
	public void reset() {
		currentParam = (parameters == null) ? null : parameters.iterator();
	}
}

View File

@@ -0,0 +1,211 @@
package com.lanternsoftware.util.dao.jdbc.preparedinstatement;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedParameter;
/**
* Class that represents an instance of batched parameters for all {@link InClause}s that are part of an SQL statement.
* Example: Batch Size: 2 select * from table where (record_id) in ('A','B','C') and str_val in('Y','Z') and dbl_val
* in(1, 2, 3) In this case, we will end up with 4 distinct statements to represent the data within the 3 in-clauses:
* Statement 1: Query: select * from table where (record_id) in (?,?) and str_val = in(?,?) and dbl_val in(?,?)
* Parameters: 1. Collection<PreparedParameter>: Parameters: 'A', 'B' 2. Collection<PreparedParameter>: Parameters: 'Y',
* 'Z' 3. Collection<PreparedParameter>: Parameters: 1, 2 Statement 2: Query: select * from table where record_id = ?
* and str_val = in(?,?) and dbl_val in(?,?) Parameters: 1. Collection<PreparedParameter>: Parameters: 'C' 2.
* Collection<PreparedParameter>: Parameters: 'Y', 'Z' 3. Collection<PreparedParameter>: Parameters: 1, 2 Statement 3:
* Query: select * from table where (record_id) in (?,?) and str_val = in(?,?) and dbl_val = ? Parameters: 1.
* Collection<PreparedParameter>: Parameters: 'A', 'B' 2. Collection<PreparedParameter>: Parameters: 'Y', 'Z' 3.
* Collection<PreparedParameter>: Parameters: 3 Statement 4: Query: select * from table where (record_id) = ? and
* str_val = in(?,?) and dbl_val = ? Parameters: 1. Collection<PreparedParameter>: Parameters: 'C' 2.
* Collection<PreparedParameter>: Parameters: 'Y', 'Z' 3. Collection<PreparedParameter>: Parameters: 3
*/
public class InClauseStatement implements Cloneable {
public static final String IN_CLAUSE_EXPRESSION = "{in}";
private static final String IN_CLAUSE_REGEX = "\\{in\\}";
private static final String QUESTION_MARK = "?";
private static final String COMMA = ",";
private static final String LEFT_PARANTHESIS = "(";
private static final String RIGHT_PARANTHESIS = ")";
private static final String IN_STATEMENT = " in ";
private static final String EQUALS = " = ";
private static final String SQL_NOT_EQUAL = "0 = 1";
private static final String SQL_EQUAL = "1 = 1";
private Map<Integer, InClauseBatchedParameter> parameters = new HashMap<Integer, InClauseBatchedParameter>();
/**
* Method that sets a {@link InClauseBatchedParameter} (representing the data from an {@link InClause} for a specific
* index
*
* @param _startIdx
* - Integer representing the {@link InClause}'s index
* @param _parameter
* - {@link InClauseBatchedParameter} (representing the data from an {@link InClause}
*/
void setNextParameter(int _startIdx, InClauseBatchedParameter _parameter) {
if (_parameter == null)
return;
parameters.put(_startIdx, _parameter);
}
/**
* {@inheritDoc}
*/
public InClauseStatement clone() {
InClauseStatement statement = new InClauseStatement();
Iterator<Entry<Integer, InClauseBatchedParameter>> iter = parameters.entrySet().iterator();
while (iter.hasNext()) {
Entry<Integer, InClauseBatchedParameter> entry = iter.next();
if (entry == null)
continue;
statement.setNextParameter(entry.getKey(), entry.getValue());
}
return statement;
}
/**
* @return a Map of index to {@link InClauseBatchedParameter} (representing the data from an {@link InClause}
*/
Map<Integer, InClauseBatchedParameter> getParameters() {
return parameters;
}
/**
* Method to generate the column display for an in-clause. Depending on the number of columns, the format may vary: 1
* column: (column) 2 or more columns: (column_one, column_two)
*
* @param _arrColumns
* - An Array of {@link InClauseColumn}s needed to build the display
* @return String representing the in-clause output
*/
String buildColumnDisplay(InClauseColumn[] _arrColumns) {
StringBuilder sbColumns = new StringBuilder();
for (InClauseColumn column : _arrColumns) {
if (column == null)
continue;
if (sbColumns.length() > 0)
sbColumns.append(COMMA);
sbColumns.append(column.getColumnDisplay());
}
StringBuilder sbDisplay = new StringBuilder();
sbDisplay.append(LEFT_PARANTHESIS);
sbDisplay.append(sbColumns.toString());
sbDisplay.append(RIGHT_PARANTHESIS);
sbDisplay.append(IN_STATEMENT);
return sbDisplay.toString();
}
/**
 * Builds the column display for the single-column special case: one {@link InClauseColumn}
 * with one {@link PreparedParameter} is rendered as an equality ({@code column =}) instead of
 * {@code column in (?)}. For any other column count an empty string is returned.
 *
 * @param _columns
 *            - the in-clause columns
 * @return {@code "column ="} when exactly one column is supplied, otherwise {@code ""}
 */
String buildEqualDisplay(InClauseColumn[] _columns) {
	if (_columns.length != 1)
		return "";
	StringBuilder display = new StringBuilder();
	display.append(_columns[0].getColumnDisplay());
	display.append(EQUALS);
	return display.toString();
}
/**
 * Generates executable SQL for this batch by replacing each in-clause delimiter ({in}) in
 * the given query with the appropriate column display and ?-placeholders.
 * <p />
 * For each batched parameter, one of three forms is produced:
 * <ul>
 * <li>no values: {@code 1=1} or {@code 1=0} depending on the returnAllIfEmpty flag</li>
 * <li>one column, one value: {@code column = ?}</li>
 * <li>otherwise: {@code (columns) in ((?,...),(?,...))}, grouping placeholders per row when
 * there is more than one column</li>
 * </ul>
 *
 * @param _query
 *            - String representing the in-clause delimited SQL statement
 * @return a String representing the dynamically modified SQL statement; {@code ""} if a null
 *         batched parameter is encountered (defensive — the statement cannot be built)
 */
String formatQuery(String _query) {
	if (parameters.isEmpty())
		return _query;
	String sModifiedQuery = _query;
	for (InClauseBatchedParameter parameter : parameters.values()) {
		if (parameter == null)
			return "";
		int nColumnCnt = parameter.getColumnCnt();
		int nCurColumnCnt = 0;
		StringBuilder sbParameters = new StringBuilder();
		// special case: no parameters - emit a constant-true or constant-false predicate
		// (i.e. 1 = 1 or 0 = 1) depending on the returnAllIfEmpty flag
		if (parameter.getBatchParameterCnt() == 0) {
			if (parameter.isReturnAllIfEmpty())
				sbParameters.append(SQL_EQUAL);
			else
				sbParameters.append(SQL_NOT_EQUAL);
		}
		// special case: a single column with a single value is optimized to
		// "column = ?" rather than "column in (?)"
		else if (nColumnCnt == 1 && parameter.getBatchParameterCnt() == 1) {
			sbParameters.append(buildEqualDisplay(parameter.getColumns()));
			sbParameters.append(QUESTION_MARK);
		}
		// normal replacement case
		else {
			sbParameters.append(buildColumnDisplay(parameter.getColumns()));
			sbParameters.append(LEFT_PARANTHESIS);
			if (nColumnCnt > 1)
				sbParameters.append(LEFT_PARANTHESIS);
			for (int nCurIdx = 0; nCurIdx < parameter.getBatchParameterCnt(); ++nCurIdx) {
				// close the current row group and start a new one each nColumnCnt placeholders
				if (nColumnCnt > 1 && nCurColumnCnt == nColumnCnt) {
					sbParameters.append(RIGHT_PARANTHESIS);
					sbParameters.append(COMMA);
					sbParameters.append(LEFT_PARANTHESIS);
					nCurColumnCnt = 0;
				}
				else if (nCurIdx > 0)
					sbParameters.append(COMMA);
				sbParameters.append(QUESTION_MARK);
				++nCurColumnCnt;
			}
			if (nColumnCnt > 1)
				sbParameters.append(RIGHT_PARANTHESIS);
			sbParameters.append(RIGHT_PARANTHESIS);
		}
		// quoteReplacement prevents '$' or '\' in column displays from being interpreted
		// as regex group references by replaceFirst
		sModifiedQuery = sModifiedQuery.replaceFirst(IN_CLAUSE_REGEX, java.util.regex.Matcher.quoteReplacement(sbParameters.toString()));
	}
	return sModifiedQuery;
}
}

View File

@@ -0,0 +1,507 @@
package com.lanternsoftware.util.dao.jdbc.preparedinstatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.TreeMap;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.lanternsoftware.util.dao.jdbc.AbstractJdbcProxy;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedBoolean;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedByte;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedBytes;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedDouble;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedEnum;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedFloat;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedInBatchedParameter;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedInt;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedLong;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedNull;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedObject;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedParameter;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedShort;
import com.lanternsoftware.util.dao.jdbc.preparedparameter.PreparedString;
/**
* Class that will execute batched {@link PreparedStatement}s supporting {@link InClause}(s).
* <p />
* An in-clause can be defined using an in-clause delimiter: <b>{in}</b>. This delimiter will be replaced by the
* appropriate column displays and values. Because the number of parameters within each in-clause is dynamic, the query
* will be restructured every time it's executed to reflect the correct number of ?-delimiters for parameters. This
* syntax supports both single and multi-column in-clauses as well as multiple in-clauses per select. The batch size for
* the overall SQL statement may be customized. Depending on the number of columns and parameters that are passed in for
* each in-clause, several special cases may occur:
* <p />
* <strong>Special Case 1:</strong> The batch-size is less than the number of {@link InClauseColumn}s In this situation,
* the batch size will be adjusted to the number of columns. For example, if you set the batch size to 1 and you pass in
* 2 columns, your optimized batch size will be 2. <br />
* <strong>Special Case 2:</strong> No {@link PreparedParameter}s are set on any {@link InClauseColumn}s In this
* situation, the behavior will depend on the ReturnAllIfEmpty flag passed in during the constructor. If true, the
* in-clause will be replaced with a <code>'1=1'</code>, meaning all rows would be returned; false will replace with
* <code>'1=0'</code>. <br />
* <strong>Special Case 3:</strong> A single {@link InClauseColumn} with a single {@link PreparedParameter}s is set. In
* this situation, the in-clause will be optimized to be an equals (i.e. <code>=?</code> and not <code>in(?)</code>).
* <p />
* Some thought must be given when constructing queries and using this utility. If you are using a single in-clause, the
* parameters will be batched into optimized sizes (i.e. if you pass in a list of 76, it may be batched to a list of
* 100). As a result, the resulting {@link ResultSet}s may not produce unique rows across multiple {@link #next()}
* calls.
* <p />
* If you are using multiple in-clauses, you may end up executing several queries to accommodate all combinations of
* parameters. This can be minimized by using appropriate batch-sizes (typically the default). Using or-statements
* between in-clauses may produce multiple queries for the same data (the queries will now be batched). <br />
* <strong>Example:</strong> <br />
* Batch Size: 2 <br />
* Original Query:
* <code>select * from table where (record_id) in ('A','B','C') and str_val in('Y','Z') and dbl_val in(1, 2, 3)</code>
* <br />
* In-Clause Delimited Query: <code>select * from table where {in} and {in} and {in}</code> <br />
* <br />
* In this case, we will end up with 4 distinct statements to represent the data within the 3 in-clauses: <br />
* Statement 1: <br />
* Query: <code>select * from table where (record_id) in (?,?) and str_val = in(?,?) and dbl_val in(?,?)</code> <br />
* Parameters: <br />
* 1. Collection<PreparedParameter>: Parameters: 'A', 'B' <br />
* 2. Collection<PreparedParameter>: Parameters: 'Y', 'Z' <br />
* 3. Collection<PreparedParameter>: Parameters: 1, 2 <br />
* <br />
* Statement 2: <br />
* Query: <code>select * from table where record_id = ? and str_val = in(?,?) and dbl_val in(?,?)</code> <br />
* Parameters: <br />
* 1. Collection<PreparedParameter>: Parameters: 'C' <br />
* 2. Collection<PreparedParameter>: Parameters: 'Y', 'Z' <br />
* 3. Collection<PreparedParameter>: Parameters: 1, 2 <br />
* <br />
* Statement 3: <br />
* Query: <code>select * from table where (record_id) in (?,?) and str_val = in(?,?) and dbl_val = ?</code> <br />
* Parameters: <br />
* 1. Collection<PreparedParameter>: Parameters: 'A', 'B' <br />
* 2. Collection<PreparedParameter>: Parameters: 'Y', 'Z' <br />
* 3. Collection<PreparedParameter>: Parameters: 3 <br />
* <br />
* Statement 4: <br />
* Query: <code>select * from table where (record_id) = ? and str_val = in(?,?) and dbl_val = ?</code> <br />
* Parameters: <br />
* 1. Collection<PreparedParameter>: Parameters: 'C' <br />
* 2. Collection<PreparedParameter>: Parameters: 'Y', 'Z' <br />
* 3. Collection<PreparedParameter>: Parameters: 3
* <p />
* <strong>A Note on {@link PreparedParameter}s:</strong> The current functionality of PreparedInStatement (like the
* current SQL standards) does not support null-parameters in a "in()" query. <br />
* i.e. <code>select * from table where (record_id) in ('1', '2', NULL)</code> would not get you rows where the
* record_id is null. <br />
* To do this you would need to manually format your query to something like:
* <code>select * from table where record_id IS NULL or {in}</code>, which would then get translated by
* PreparedInStatement to: <code>select * from table where record_id IS NULL or (record_id) in ('1', '2')</code>.
*/
public class PreparedInStatement {
	private static final Logger LOG = LoggerFactory.getLogger(PreparedInStatement.class);
	// TreeMap so parameters are bound to the PreparedStatement in ascending index order;
	// iterating a HashMap's values() binds them in hash order, which can scramble
	// placeholder positions (addToStatement assigns positions sequentially).
	private final Map<Integer, PreparedParameter> m_mapParameters = new TreeMap<Integer, PreparedParameter>();
	private final InClauseBuilder clauseBuilder = new InClauseBuilder();
	private final AbstractJdbcProxy proxy;
	private final Connection connection;
	private final String query;
	private ResultSet resultSet;
	private Collection<InClauseStatement> statements;
	/**
	 * Default Constructor
	 *
	 * @param _query
	 *            - SQL query to execute
	 * @param _proxy
	 *            - {@link AbstractJdbcProxy} that should be used to retrieve the {@link PreparedStatement}s.
	 * @throws SQLException
	 *             if no proxy is supplied
	 */
	public PreparedInStatement(String _query, AbstractJdbcProxy _proxy) throws SQLException {
		// Validate before use; the original check ran after proxy.getConnection(), so a
		// null proxy produced a NullPointerException instead of the intended SQLException.
		if (_proxy == null)
			throw new SQLException("invalid_connection");
		query = _query;
		proxy = _proxy;
		connection = proxy.getConnection();
		statements = null;
	}
	/**
	 * Sets the designated parameter to the given {@link Boolean} value. The driver converts this to an SQL BIT value when
	 * it sends it to the database.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _val
	 *            - {@link Boolean} value
	 */
	public void setBoolean(int _parameterIndex, boolean _val) {
		setParameter(_parameterIndex, new PreparedBoolean(_val));
	}
	/**
	 * Sets the designated parameter to the given {@link byte}. The driver converts this to an SQL TINYINT value when it
	 * sends it to the database.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _byte
	 *            - {@link byte} value
	 */
	public void setByte(int _parameterIndex, byte _byte) {
		setParameter(_parameterIndex, new PreparedByte(_byte));
	}
	/**
	 * Sets the designated parameter to the given bytes. The driver converts this to an SQL VARBINARY or
	 * LONGVARBINARY (depending on the argument's size relative to the driver's limits on VARBINARY values) when it sends it
	 * to the database.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _bytes
	 *            - values
	 */
	public void setBytes(int _parameterIndex, byte[] _bytes) {
		setParameter(_parameterIndex, new PreparedBytes(_bytes));
	}
	/**
	 * Sets the designated parameter to the given Java double value. The driver converts this to an SQL DOUBLE value when it
	 * sends it to the database.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _val
	 *            - {@link double} value
	 */
	public void setDouble(int _parameterIndex, double _val) {
		setParameter(_parameterIndex, new PreparedDouble(_val));
	}
	/**
	 * Sets the designated parameter to the given Java float value. The driver converts this to an SQL FLOAT value when it
	 * sends it to the database.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _val
	 *            - {@link float} value
	 */
	public void setFloat(int _parameterIndex, float _val) {
		setParameter(_parameterIndex, new PreparedFloat(_val));
	}
	/**
	 * Sets the designated parameter to the given Java int value. The driver converts this to an SQL INTEGER value when it
	 * sends it to the database.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _val
	 *            - {@link int} value
	 */
	public void setInt(int _parameterIndex, int _val) {
		setParameter(_parameterIndex, new PreparedInt(_val));
	}
	/**
	 * Sets the designated parameter to the given Java long value. The driver converts this to an SQL BIGINT value when it
	 * sends it to the database.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _val
	 *            - {@link long} value
	 */
	public void setLong(int _parameterIndex, long _val) {
		setParameter(_parameterIndex, new PreparedLong(_val));
	}
	/**
	 * Sets the designated parameter to SQL NULL.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _sqlType
	 *            - Integer representing the SQL type code defined in {@link java.sql.Types}
	 */
	public void setNull(int _parameterIndex, int _sqlType) {
		setParameter(_parameterIndex, new PreparedNull(_sqlType));
	}
	/**
	 * Sets the designated parameter to SQL NULL. This version of the method setNull should be used for user-defined types
	 * and REF type parameters (e.g. STRUCT, DISTINCT, JAVA_OBJECT, and named array types). To be portable, applications
	 * must give the SQL type code and the fully-qualified SQL type name when specifying a NULL user-defined or REF
	 * parameter. If the parameter does not have a user-defined or REF type, the given typeName is ignored; although
	 * intended for user-defined and Ref parameters, this method may be used to set a null parameter of any JDBC type.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _sqlType
	 *            - Integer representing the SQL type code defined in {@link java.sql.Types}
	 * @param _typeName
	 *            - String representing the fully-qualified name of an SQL user-defined type; ignored if the parameter is
	 *            not a user-defined type or REF
	 */
	public void setNull(int _parameterIndex, int _sqlType, String _typeName) {
		setParameter(_parameterIndex, new PreparedNull(_sqlType, _typeName));
	}
	/**
	 * Sets the designated parameter to the given Java short value. The driver converts this to an SQL SMALLINT value when
	 * it sends it to the database.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _val
	 *            - {@link short} value
	 */
	public void setShort(int _parameterIndex, short _val) {
		setParameter(_parameterIndex, new PreparedShort(_val));
	}
	/**
	 * Sets the designated parameter to the given {@link String} value. The driver converts this to an SQL VARCHAR or
	 * LONGVARCHAR value (depending on the argument's size relative to the driver's limits on VARCHAR values) when it sends
	 * it to the database.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _val
	 *            - {@link String} value
	 */
	public void setString(int _parameterIndex, String _val) {
		setParameter(_parameterIndex, new PreparedString(_val));
	}
	/**
	 * Sets the designated parameter to the given {@link Object}, letting the driver determine the SQL type.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _object
	 *            - value
	 */
	public void setObject(int _parameterIndex, Object _object) {
		setParameter(_parameterIndex, new PreparedObject(_object));
	}
	/**
	 * Registers a {@link PreparedParameter} at the given index. Parameters are later bound in ascending index order.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _parameter
	 *            - the parameter to bind
	 */
	public void setParameter(int _parameterIndex, PreparedParameter _parameter) {
		m_mapParameters.put(_parameterIndex, _parameter);
	}
	/**
	 * Sets a Collection of {@link InClauseColumn}s, making up an in-clause. The parameters contained in the
	 * {@link InClauseColumn}s will be batched appropriately before execution.
	 * <p />
	 * Note that if the list of parameters (contained within the {@link InClauseColumn}s are empty), the in-clause will be
	 * replaced with a '1 = 0'.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _columns
	 *            - a variable list of {@link InClauseColumn}s that make up the in-clause
	 * @throws SQLException
	 *             If at least one InClauseColumn is not given or if the size of each InClauseColumn parameter count is not
	 *             the same.
	 */
	public void setInClause(int _parameterIndex, InClauseColumn... _columns) throws SQLException {
		setInClause(_parameterIndex, false, _columns);
	}
	/**
	 * Convenience overload that builds a single-column in-clause from arbitrary values, wrapping each supported value
	 * type in its matching {@link PreparedParameter}. Unsupported value types are silently skipped.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _columnDisplay
	 *            - the column display for the in-clause
	 * @param _values
	 *            - the values for the in-clause
	 * @throws SQLException
	 *             If the in-clause is invalid.
	 */
	@SuppressWarnings("rawtypes")
	public void setInClause(int _parameterIndex, String _columnDisplay, Collection<Object> _values) throws SQLException {
		Queue<PreparedParameter> queueParameters = new LinkedList<PreparedParameter>();
		for (Object value : _values) {
			if (value instanceof String)
				queueParameters.add(new PreparedString((String) value, false));
			else if (value instanceof Boolean)
				queueParameters.add(new PreparedBoolean((Boolean) value));
			else if (value instanceof Byte)
				queueParameters.add(new PreparedByte((Byte) value));
			else if (value instanceof Double)
				queueParameters.add(new PreparedDouble((Double) value));
			else if (value instanceof Enum)
				queueParameters.add(new PreparedEnum((Enum) value));
			else if (value instanceof Float)
				queueParameters.add(new PreparedFloat((Float) value));
			else if (value instanceof Integer)
				queueParameters.add(new PreparedInt((Integer) value));
			else if (value instanceof Long)
				queueParameters.add(new PreparedLong((Long) value));
			else if (value instanceof Short)
				queueParameters.add(new PreparedShort((Short) value));
		}
		setInClause(_parameterIndex, false, new InClauseColumn(_columnDisplay, queueParameters));
	}
	/**
	 * Sets a Collection of {@link InClauseColumn}s, making up an in-clause. The parameters contained in the
	 * {@link InClauseColumn}s will be batched appropriately before execution.
	 *
	 * @param _parameterIndex
	 *            - Integer representing the index of the parameter in the SQL {@link PreparedStatement}
	 * @param _returnAllIfEmpty
	 *            - a boolean flag that determines the resulting behavior if the {@link InClauseColumn}s are empty. If true,
	 *            the in-clause will be replaced with a '1=1', meaning all rows would be returned; false will replace with
	 *            '1=0'
	 * @param _columns
	 *            - a variable list of {@link InClauseColumn}s that make up the in-clause
	 * @throws SQLException
	 *             If at least one InClauseColumn is not given or if the size of each InClauseColumn parameter count is not
	 *             the same.
	 */
	public void setInClause(int _parameterIndex, boolean _returnAllIfEmpty, InClauseColumn... _columns) throws SQLException {
		if (_columns == null || (_columns.length > 0 && _columns[0] == null)) {
			throw new SQLException("invalid_in_clause_columns");
		}
		// every column must carry the same number of parameters so rows line up
		if (_columns.length > 0) {
			int size = _columns[0].getParameterCnt();
			for (InClauseColumn column : _columns) {
				if (column == null)
					continue;
				if (size != column.getParameterCnt())
					throw new SQLException("invalid_parameter_cnt");
			}
		}
		clauseBuilder.addClause(new InClause(_parameterIndex, _returnAllIfEmpty, _columns));
	}
	/**
	 * Method which defines a maximum batch size used to calculate in-clause batch sizes.
	 *
	 * @param _batchSize
	 *            - the maximum number of parameters per batched statement
	 */
	public void setMaxBatchSize(int _batchSize) {
		clauseBuilder.setMaxBatchSize(_batchSize);
	}
	/**
	 * Method which tells if there are remaining batched {@link PreparedStatement}s left to be executed. Builds the
	 * batched statements lazily on first call.
	 *
	 * @return boolean; true if there are more {@link PreparedStatement}s left to be executed.
	 */
	public boolean hasNext() {
		if (statements == null)
			statements = clauseBuilder.buildStatements();
		return !statements.isEmpty();
	}
	/**
	 * Builds the next batched {@link PreparedStatement}: formats the query for the next {@link InClauseStatement} (if
	 * any), merges its batched parameters into the parameter map, and binds all parameters in index order.
	 *
	 * @return the populated {@link PreparedStatement}, ready to execute
	 * @throws SQLException
	 *             if {@link #hasNext()} has not been called first, or on database access errors
	 */
	private PreparedStatement getNextPopulatedStatement() throws SQLException {
		if (statements == null)
			throw new SQLException("invalid_statements");
		String sQuery;
		if (statements.isEmpty())
			sQuery = query;
		else {
			sQuery = "";
			InClauseStatement inStatement = statements.iterator().next();
			if (inStatement != null) {
				sQuery = inStatement.formatQuery(query);
				Map<Integer, InClauseBatchedParameter> mapParameters = inStatement.getParameters();
				if (mapParameters != null) {
					for (Entry<Integer, InClauseBatchedParameter> entry : mapParameters.entrySet()) {
						if (entry == null)
							continue;
						m_mapParameters.put(entry.getKey(), new PreparedInBatchedParameter(entry.getValue()));
					}
				}
				statements.remove(inStatement);
			}
		}
		PreparedStatement statement = proxy.getPreparedStatement(connection, sQuery);
		int idx = 1;
		for (PreparedParameter parameter : m_mapParameters.values()) {
			if (parameter != null)
				idx = parameter.addToStatement(idx, statement);
		}
		return statement;
	}
	/**
	 * Method that will call executeQuery() on the next batched {@link PreparedStatement}. This method will also close the
	 * previous {@link ResultSet} that was returned. Note that the consumer is responsible for closing the last
	 * {@link ResultSet} (when next() is not called again). Only the current batched {@link PreparedStatement} is
	 * executed, so you are not guaranteed that the data being returned is unique across multiple calls to next(). It is
	 * required to call {@link #hasNext()} before executing this method, otherwise a {@link SQLException} will be thrown.
	 *
	 * @return the next {@link ResultSet} to be evaluated
	 * @throws SQLException
	 *             If a database access error occurs or the SQL statement is invalid.
	 */
	public ResultSet next() throws SQLException {
		if (resultSet != null)
			resultSet.close();
		PreparedStatement statement = null;
		try {
			statement = getNextPopulatedStatement();
			resultSet = statement.executeQuery();
			statement.clearWarnings();
			statement.clearBatch();
			statement.clearParameters();
			return resultSet;
		}
		catch (SQLException e) {
			// release in reverse-acquisition order; a failing close() must not mask
			// the original exception
			closeQuietly(resultSet);
			closeQuietly(statement);
			closeQuietly(connection);
			throw e;
		}
	}
	/**
	 * Method that will call executeUpdate() on all batched {@link PreparedStatement}s. The connection is released even if
	 * an update fails.
	 *
	 * @return an Integer representing the affected row count (across all batched {@link PreparedStatement}s).
	 * @throws SQLException
	 *             If a database access error occurs or the SQL statement is invalid.
	 */
	public int executeUpdate() throws SQLException {
		int rowCnt = 0;
		try {
			while (hasNext()) {
				PreparedStatement statement = getNextPopulatedStatement();
				try {
					rowCnt += statement.executeUpdate();
				}
				finally {
					closeQuietly(statement);
				}
			}
		}
		finally {
			closeQuietly(connection);
		}
		return rowCnt;
	}
	/**
	 * Method that will clear out any remaining {@link PreparedStatement}s as well as close the last-used {@link ResultSet}.
	 * Clear should be called if the statement is being reused, in between executions.
	 *
	 * @throws SQLException
	 */
	public void clear() throws SQLException {
		statements = null;
		clauseBuilder.reset();
		m_mapParameters.clear();
		closeQuietly(resultSet);
		resultSet = null;
	}
	/**
	 * Closes the given resource, logging (rather than propagating) any failure.
	 *
	 * @param _closeable
	 *            - the resource to close; null is ignored
	 */
	private void closeQuietly(AutoCloseable _closeable) {
		if (_closeable == null)
			return;
		try {
			_closeable.close();
		}
		catch (Exception e) {
			LOG.error("Failed to close resource", e);
		}
	}
}

View File

@@ -0,0 +1,55 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Queue;
import com.lanternsoftware.util.dao.jdbc.preparedinstatement.InClauseColumn;
/**
* Implementation of {@link PreparedParameter} for {@link Boolean}s
*/
public class PreparedBoolean implements PreparedParameter {
	private final Boolean val;

	/**
	 * Default Constructor
	 *
	 * @param _val
	 *            - the {@link Boolean} value to bind, or null to bind SQL NULL
	 */
	public PreparedBoolean(Boolean _val) {
		val = _val;
	}

	/**
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement == null)
			return _startIdx;
		if (val != null)
			_statement.setBoolean(_startIdx, val);
		else
			// Bind nulls with the boolean SQL type; the original passed Types.TINYINT,
			// apparently copied from PreparedByte.
			_statement.setNull(_startIdx, Types.BOOLEAN);
		return ++_startIdx;
	}

	/**
	 * Static method to produce an {@link InClauseColumn} for a Collection of {@link Boolean}s
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of {@link Boolean}s (may be null)
	 * @return {@link InClauseColumn}, or null if no values were supplied
	 */
	public static InClauseColumn getInClause(String _columnDisplay, Collection<Boolean> _values) {
		if (_values == null)
			return null;
		Queue<PreparedParameter> queueParameters = new LinkedList<PreparedParameter>();
		for (Boolean value : _values)
			queueParameters.add(new PreparedBoolean(value));
		return new InClauseColumn(_columnDisplay, queueParameters);
	}
}

View File

@@ -0,0 +1,61 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Queue;
import com.lanternsoftware.util.dao.jdbc.preparedinstatement.InClauseColumn;
/**
* Implementation of {@link PreparedParameter} for {@link Byte}
*/
public class PreparedByte implements PreparedParameter {
	private final Byte val;

	/**
	 * Default Constructor
	 *
	 * @param _val
	 *            - the {@link Byte} value to bind, or null to bind SQL NULL
	 */
	public PreparedByte(Byte _val) {
		val = _val;
	}

	/**
	 * Binds this value at the given index, using SQL NULL of type TINYINT when no value is present.
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement != null) {
			if (val == null)
				_statement.setNull(_startIdx, Types.TINYINT);
			else
				_statement.setByte(_startIdx, val);
			_startIdx++;
		}
		return _startIdx;
	}

	/**
	 * Static factory that wraps a Collection of {@link Byte}s as a single {@link InClauseColumn}.
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of {@link Byte}s (may be null)
	 * @return {@link InClauseColumn}, or null if no values were supplied
	 */
	public static InClauseColumn getInClause(String _columnDisplay, Collection<Byte> _values) {
		if (_values == null)
			return null;
		Queue<PreparedParameter> queue = new LinkedList<PreparedParameter>();
		for (Byte value : _values) {
			queue.add(new PreparedByte(value));
		}
		return new InClauseColumn(_columnDisplay, queue);
	}
}

View File

@@ -0,0 +1,61 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Queue;
import com.lanternsoftware.util.dao.jdbc.preparedinstatement.InClauseColumn;
/**
* Implementation of {@link PreparedParameter} for {@link byte}s
*/
public class PreparedBytes implements PreparedParameter {
	private final byte[] val;

	/**
	 * Default Constructor
	 *
	 * @param _val
	 *            - the byte array to bind, or null to bind SQL NULL
	 */
	public PreparedBytes(byte[] _val) {
		val = _val;
	}

	/**
	 * Binds this byte array at the given index, using SQL NULL of type VARBINARY when no value is present.
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement != null) {
			if (val == null)
				_statement.setNull(_startIdx, Types.VARBINARY);
			else
				_statement.setBytes(_startIdx, val);
			_startIdx++;
		}
		return _startIdx;
	}

	/**
	 * Static factory that wraps a Collection of byte arrays as a single {@link InClauseColumn}.
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of byte arrays (may be null)
	 * @return {@link InClauseColumn}, or null if no values were supplied
	 */
	public static InClauseColumn getInClause(String _columnDisplay, Collection<byte[]> _values) {
		if (_values == null)
			return null;
		Queue<PreparedParameter> queue = new LinkedList<PreparedParameter>();
		for (byte[] value : _values) {
			queue.add(new PreparedBytes(value));
		}
		return new InClauseColumn(_columnDisplay, queue);
	}
}

View File

@@ -0,0 +1,61 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Queue;
import com.lanternsoftware.util.dao.jdbc.preparedinstatement.InClauseColumn;
/**
* Implementation of {@link PreparedParameter} for {@link Double}s
*/
public class PreparedDouble implements PreparedParameter {
	private final Double val;

	/**
	 * Default Constructor
	 *
	 * @param _val
	 *            - the {@link Double} value to bind, or null to bind SQL NULL
	 */
	public PreparedDouble(Double _val) {
		val = _val;
	}

	/**
	 * Binds this value at the given index, using SQL NULL of type DOUBLE when no value is present.
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement != null) {
			if (val == null)
				_statement.setNull(_startIdx, Types.DOUBLE);
			else
				_statement.setDouble(_startIdx, val);
			_startIdx++;
		}
		return _startIdx;
	}

	/**
	 * Static factory that wraps a Collection of {@link Double}s as a single {@link InClauseColumn}.
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of {@link Double}s (may be null)
	 * @return {@link InClauseColumn}, or null if no values were supplied
	 */
	public static InClauseColumn getInClause(String _columnDisplay, Collection<Double> _values) {
		if (_values == null)
			return null;
		Queue<PreparedParameter> queue = new LinkedList<PreparedParameter>();
		for (Double value : _values) {
			queue.add(new PreparedDouble(value));
		}
		return new InClauseColumn(_columnDisplay, queue);
	}
}

View File

@@ -0,0 +1,75 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collection;
import java.util.EnumSet;
import java.util.LinkedList;
import java.util.Queue;
import com.lanternsoftware.util.dao.jdbc.preparedinstatement.InClauseColumn;
/**
 * Implementation of {@link PreparedParameter} for {@link Enum}s; values are bound by {@link Enum#name()}.
 */
public class PreparedEnum implements PreparedParameter {
	private final Enum<?> val;

	/**
	 * Default Constructor
	 *
	 * @param _val
	 *            - the {@link Enum} value to bind (by name), or null to bind SQL NULL
	 */
	public PreparedEnum(Enum<?> _val) {
		val = _val;
	}

	/**
	 * Binds {@link Enum#name()} as a VARCHAR at the given index, or SQL NULL when no value is present.
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement != null) {
			if (val == null)
				_statement.setNull(_startIdx, Types.VARCHAR);
			else
				_statement.setString(_startIdx, val.name());
			_startIdx++;
		}
		return _startIdx;
	}

	/**
	 * Static factory that wraps a Collection of {@link Enum}s as a single {@link InClauseColumn}.
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of {@link Enum}s (may be null)
	 * @return {@link InClauseColumn}, or null if no values were supplied
	 */
	public static InClauseColumn getInClause(String _columnDisplay, Collection<Enum<?>> _values) {
		if (_values == null)
			return null;
		Queue<PreparedParameter> queue = new LinkedList<PreparedParameter>();
		for (Enum<?> value : _values) {
			queue.add(new PreparedEnum(value));
		}
		return new InClauseColumn(_columnDisplay, queue);
	}

	/**
	 * Static factory that wraps an {@link EnumSet} as a single {@link InClauseColumn}.
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - {@link EnumSet} of values (may be null)
	 * @return {@link InClauseColumn}, or null if no values were supplied
	 */
	public static InClauseColumn getInClause(String _columnDisplay, EnumSet<?> _values) {
		if (_values == null)
			return null;
		Queue<PreparedParameter> queue = new LinkedList<PreparedParameter>();
		for (Enum<?> value : _values) {
			queue.add(new PreparedEnum(value));
		}
		return new InClauseColumn(_columnDisplay, queue);
	}
}

View File

@@ -0,0 +1,61 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Queue;
import com.lanternsoftware.util.dao.jdbc.preparedinstatement.InClauseColumn;
/**
 * Implementation of {@link PreparedParameter} for {@link Float}s
 */
public class PreparedFloat implements PreparedParameter {
	private final Float val;

	/**
	 * Default Constructor
	 *
	 * @param _val
	 *            - {@link Float} value to bind (may be null)
	 */
	public PreparedFloat(Float _val) {
		val = _val;
	}

	/**
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement == null)
			return _startIdx;
		// A null value is bound as a SQL NULL of type FLOAT
		if (val == null)
			_statement.setNull(_startIdx, Types.FLOAT);
		else
			_statement.setFloat(_startIdx, val);
		return _startIdx + 1;
	}

	/**
	 * Static method to produce an {@link InClauseColumn} for a Collection of {@link Float}s
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of {@link Float}s
	 * @return {@link InClauseColumn}, or null if _values is null
	 */
	public static InClauseColumn getInClause(String _columnDisplay, Collection<Float> _values) {
		if (_values == null)
			return null;
		LinkedList<PreparedParameter> params = new LinkedList<PreparedParameter>();
		for (Float fVal : _values)
			params.add(new PreparedFloat(fVal));
		return new InClauseColumn(_columnDisplay, params);
	}
}

View File

@@ -0,0 +1,46 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.LinkedList;
import com.lanternsoftware.util.dao.jdbc.preparedinstatement.InClauseBatchedParameter;
/**
 * Implementation of {@link PreparedParameter} for {@link InClauseBatchedParameter}s. Delegates to
 * each parameter held by the batch, binding them consecutively into the statement.
 */
public class PreparedInBatchedParameter implements PreparedParameter {
	private final InClauseBatchedParameter batchedParameter;

	/**
	 * Default Constructor
	 *
	 * @param _parameter
	 *            - {@link InClauseBatchedParameter} (may be null, in which case nothing is bound)
	 */
	public PreparedInBatchedParameter(InClauseBatchedParameter _parameter) {
		batchedParameter = _parameter;
	}

	/**
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement == null || batchedParameter == null)
			return _startIdx;
		LinkedList<PreparedParameter> listParameters = batchedParameter.getParameters();
		if (listParameters == null || listParameters.isEmpty())
			return _startIdx;
		int nIdx = _startIdx;
		for (PreparedParameter parameter : listParameters) {
			if (parameter == null)
				continue;
			// Honor the PreparedParameter contract: the returned value is the next free index.
			// The previous implementation discarded the return value and always advanced by one,
			// which mis-numbered any nested parameter that consumes zero or multiple slots.
			nIdx = parameter.addToStatement(nIdx, _statement);
		}
		return nIdx;
	}
}

View File

@@ -0,0 +1,85 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Queue;
import com.lanternsoftware.util.dao.jdbc.preparedinstatement.InClauseColumn;
/**
 * Implementation of {@link PreparedParameter} for {@link Integer}s
 */
public class PreparedInt implements PreparedParameter {
	private final Integer val;

	/**
	 * Default Constructor
	 *
	 * @param _val
	 *            - {@link Integer} value to bind (may be null)
	 */
	public PreparedInt(Integer _val) {
		val = _val;
	}

	/**
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement == null)
			return _startIdx;
		// A null value is bound as a SQL NULL of type INTEGER
		if (val == null)
			_statement.setNull(_startIdx, Types.INTEGER);
		else
			_statement.setInt(_startIdx, val);
		return _startIdx + 1;
	}

	/**
	 * Static method to produce an {@link InClauseColumn} for a Collection of {@link Integer}s
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of {@link Integer}s
	 * @return {@link InClauseColumn}, or null if _values is null
	 */
	public static InClauseColumn getInClause(String _columnDisplay, Collection<Integer> _values) {
		if (_values == null)
			return null;
		LinkedList<PreparedParameter> params = new LinkedList<PreparedParameter>();
		for (Integer nVal : _values)
			params.add(new PreparedInt(nVal));
		return new InClauseColumn(_columnDisplay, params);
	}

	/**
	 * Static method to produce an {@link InClauseColumn} for a Collection of {@link Enum}s. The Integer value will be
	 * generated for each enum by using the enum's {@link Enum#ordinal()} value; null enums are bound as SQL NULLs.
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of {@link Enum}s
	 * @return {@link InClauseColumn} containing integers, or null if _values is null
	 */
	public static InClauseColumn getInClauseEnums(String _columnDisplay, Collection<? extends Enum<?>> _values) {
		if (_values == null)
			return null;
		LinkedList<PreparedParameter> params = new LinkedList<PreparedParameter>();
		for (Enum<?> e : _values)
			params.add(new PreparedInt(e == null ? null : Integer.valueOf(e.ordinal())));
		return new InClauseColumn(_columnDisplay, params);
	}
}

View File

@@ -0,0 +1,96 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collection;
import java.util.GregorianCalendar;
import java.util.LinkedList;
import java.util.Queue;
import com.lanternsoftware.util.dao.jdbc.preparedinstatement.InClauseColumn;
/**
 * Implementation of {@link PreparedParameter} for {@link Long}s
 */
public class PreparedLong implements PreparedParameter {
	private final Long val;

	/**
	 * Constructor taking a long value
	 *
	 * @param _val
	 *            - {@link Long} value to bind (may be null)
	 */
	public PreparedLong(Long _val) {
		val = _val;
	}

	/**
	 * Constructor taking a GregorianCalendar that will be converted to a long using the
	 * {@link GregorianCalendar#getTimeInMillis()} value.
	 *
	 * @param _cal
	 *            - {@link GregorianCalendar} (may be null)
	 */
	public PreparedLong(GregorianCalendar _cal) {
		val = (_cal == null) ? null : Long.valueOf(_cal.getTimeInMillis());
	}

	/**
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement == null)
			return _startIdx;
		// A null value is bound as a SQL NULL of type BIGINT
		if (val == null)
			_statement.setNull(_startIdx, Types.BIGINT);
		else
			_statement.setLong(_startIdx, val);
		return _startIdx + 1;
	}

	/**
	 * Static method to produce an {@link InClauseColumn} for a Collection of {@link Long}s
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of {@link Long}s
	 * @return {@link InClauseColumn}, or null if _values is null
	 */
	public static InClauseColumn getInClause(String _columnDisplay, Collection<Long> _values) {
		if (_values == null)
			return null;
		LinkedList<PreparedParameter> params = new LinkedList<PreparedParameter>();
		for (Long lVal : _values)
			params.add(new PreparedLong(lVal));
		return new InClauseColumn(_columnDisplay, params);
	}

	/**
	 * Static method to produce an {@link InClauseColumn} for a Collection of {@link GregorianCalendar}s. The
	 * GregorianCalendar will be converted to a {@link Long} using the {@link GregorianCalendar#getTimeInMillis()} value.
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of {@link GregorianCalendar}s
	 * @return {@link InClauseColumn}, or null if _values is null
	 */
	public static InClauseColumn getInClauseCalendars(String _columnDisplay, Collection<GregorianCalendar> _values) {
		if (_values == null)
			return null;
		LinkedList<PreparedParameter> params = new LinkedList<PreparedParameter>();
		for (GregorianCalendar cal : _values)
			params.add(new PreparedLong(cal));
		return new InClauseColumn(_columnDisplay, params);
	}
}

View File

@@ -0,0 +1,53 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
/**
 * Implementation of {@link PreparedParameter} for SQL-Nulls
 */
public class PreparedNull implements PreparedParameter {
	private final int sqlType;
	private final String typeName;

	/**
	 * Default Constructor
	 *
	 * @param _sqlType
	 *            - Integer representing the SQL type code defined in {@link java.sql.Types}
	 */
	public PreparedNull(int _sqlType) {
		this(_sqlType, null);
	}

	/**
	 * Type Name Constructor
	 *
	 * @param _sqlType
	 *            - Integer representing the SQL type code defined in {@link java.sql.Types}
	 * @param _typeName
	 *            - String representing the fully-qualified name of an SQL user-defined type; ignored if the parameter is
	 *            not a user-defined
	 */
	public PreparedNull(int _sqlType, String _typeName) {
		sqlType = _sqlType;
		typeName = _typeName;
	}

	/**
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement != null) {
			// Use the type-name overload only for user-defined SQL types
			if (typeName != null)
				_statement.setNull(_startIdx, sqlType, typeName);
			else
				_statement.setNull(_startIdx, sqlType);
			return _startIdx + 1;
		}
		return _startIdx;
	}
}

View File

@@ -0,0 +1,43 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Queue;
import com.lanternsoftware.util.dao.jdbc.preparedinstatement.InClauseColumn;
/**
 * Implementation of {@link PreparedParameter} for arbitrary {@link Object}s. The value is bound via
 * {@link PreparedStatement#setObject(int, Object)}; a null value is written as a SQL NULL of type OTHER.
 */
public class PreparedObject implements PreparedParameter {
	// Renamed from m_val for consistency with the other PreparedParameter implementations
	private final Object val;

	/**
	 * Default Constructor
	 *
	 * @param _val
	 *            - {@link Object} to bind (may be null)
	 */
	public PreparedObject(Object _val) {
		val = _val;
	}

	/**
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement == null)
			return _startIdx;
		if (val != null)
			_statement.setObject(_startIdx, val);
		else
			_statement.setNull(_startIdx, Types.OTHER);
		return ++_startIdx;
	}

	/**
	 * Static method to produce an {@link InClauseColumn} for a Collection of {@link Object}s
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of values (the wildcard allows any element type, e.g.
	 *            {@code Collection<String>}; the previous {@code Collection<Object>} signature
	 *            rejected those)
	 * @return {@link InClauseColumn}, or null if _values is null
	 */
	public static InClauseColumn getInClause(String _columnDisplay, Collection<?> _values) {
		if (_values == null)
			return null;
		Queue<PreparedParameter> queueParameters = new LinkedList<PreparedParameter>();
		for (Object value : _values)
			queueParameters.add(new PreparedObject(value));
		return new InClauseColumn(_columnDisplay, queueParameters);
	}
}

View File

@@ -0,0 +1,22 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
/**
 * Interface representing a {@link PreparedStatement}'s parameters
 */
public interface PreparedParameter {
	/**
	 * Adds this parameter's value(s) to a {@link PreparedStatement}, starting at the given
	 * parameter index.
	 *
	 * @param _startIdx
	 *            - integer representing the starting index of the parameter
	 * @param _statement
	 *            - {@link PreparedStatement}
	 * @return an int, representing the ending index of the parameter (the next free index after
	 *         the bound value(s))
	 * @throws SQLException
	 *             if the underlying {@link PreparedStatement} rejects the value
	 */
	int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException;
}

View File

@@ -0,0 +1,61 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Queue;
import com.lanternsoftware.util.dao.jdbc.preparedinstatement.InClauseColumn;
/**
 * Implementation of {@link PreparedParameter} for {@link Short}s
 */
public class PreparedShort implements PreparedParameter {
	private final Short val;

	/**
	 * Default Constructor
	 *
	 * @param _val
	 *            - {@link Short} value to bind (may be null)
	 */
	public PreparedShort(Short _val) {
		val = _val;
	}

	/**
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement == null)
			return _startIdx;
		// A null value is bound as a SQL NULL of type SMALLINT
		if (val == null)
			_statement.setNull(_startIdx, Types.SMALLINT);
		else
			_statement.setShort(_startIdx, val);
		return _startIdx + 1;
	}

	/**
	 * Static method to produce an {@link InClauseColumn} for a Collection of {@link Short}s
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of {@link Short}s
	 * @return {@link InClauseColumn}, or null if _values is null
	 */
	public static InClauseColumn getInClause(String _columnDisplay, Collection<Short> _values) {
		if (_values == null)
			return null;
		LinkedList<PreparedParameter> params = new LinkedList<PreparedParameter>();
		for (Short nVal : _values)
			params.add(new PreparedShort(nVal));
		return new InClauseColumn(_columnDisplay, params);
	}
}

View File

@@ -0,0 +1,101 @@
package com.lanternsoftware.util.dao.jdbc.preparedparameter;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Locale;
import java.util.Queue;
import java.util.regex.Pattern;
import com.lanternsoftware.util.dao.jdbc.preparedinstatement.InClauseColumn;
/**
 * Implementation of {@link PreparedParameter} for {@link String}s
 */
public class PreparedString implements PreparedParameter {
	private final String val;
	private final boolean isKey;
	// Strips everything that is not an ASCII letter or digit when binding key columns
	private static final Pattern KEY_PATTERN = Pattern.compile("[^A-Za-z0-9]+");

	/**
	 * Default Constructor
	 *
	 * @param _val
	 *            - {@link String} value to bind (may be null)
	 */
	public PreparedString(String _val) {
		this(_val, false);
	}

	/**
	 * Default Constructor
	 *
	 * @param _val
	 *            - {@link String}
	 * @param _isKey
	 *            - boolean; if true the {@link String} should be converted to upper case alpha numeric before being saved
	 *            the {@link PreparedStatement}
	 */
	public PreparedString(String _val, boolean _isKey) {
		val = _val;
		isKey = _isKey;
	}

	/**
	 * {@inheritDoc}
	 */
	public int addToStatement(int _startIdx, PreparedStatement _statement) throws SQLException {
		if (_statement == null)
			return _startIdx;
		if (val == null)
			_statement.setNull(_startIdx, Types.VARCHAR);
		else if (isKey)
			// Key columns are normalized to upper-case alphanumeric before binding
			_statement.setString(_startIdx, KEY_PATTERN.matcher(val.toUpperCase(Locale.ENGLISH)).replaceAll(""));
		else
			_statement.setString(_startIdx, val);
		return _startIdx + 1;
	}

	/**
	 * Static method to produce an {@link InClauseColumn} for a Collection of {@link String}s
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of {@link String}s
	 * @return {@link InClauseColumn}, or null if _values is null
	 */
	public static InClauseColumn getInClause(String _columnDisplay, Collection<String> _values) {
		return getInClause(_columnDisplay, _values, false);
	}

	/**
	 * Static method to produce an {@link InClauseColumn} for a Collection of {@link String}s
	 *
	 * @param _columnDisplay
	 *            - {@link InClauseColumn} display
	 * @param _values
	 *            - Collection of {@link String}s
	 * @param _isKeyColumn
	 *            - boolean; if true the {@link String} should be converted to upper case alpha numeric before being saved
	 *            Collection
	 * @return {@link InClauseColumn}, or null if _values is null
	 */
	public static InClauseColumn getInClause(String _columnDisplay, Collection<String> _values, boolean _isKeyColumn) {
		if (_values == null)
			return null;
		LinkedList<PreparedParameter> params = new LinkedList<PreparedParameter>();
		for (String sVal : _values)
			params.add(new PreparedString(sVal, _isKeyColumn));
		return new InClauseColumn(_columnDisplay, params);
	}
}

View File

@@ -0,0 +1 @@
com.lanternsoftware.util.dao.jdbc.dao.JdbcConfigSerializer