Mirror of https://github.com/zyphlar/LanternPowerMonitor.git (synced 2024-03-08 14:07:47 +00:00)
Performance improvement for returning energy summaries from the DB. Change the way MQTT values are posted. Add a UTF-8 BOM (byte order mark) char to the BOM (bill of materials) CSV, yo dawg, BOM.
This commit is contained in:
parent 77ceec745c
commit d63f6df1fd
@@ -350,11 +350,8 @@ public class MonitorApp {
 }
 }
 }
-if (mqttPoster != null) {
-for (BreakerPower p : mqttReadings) {
-monitor.submit(() -> mqttPoster.postPower(p));
-}
-}
+if (mqttPoster != null)
+monitor.submit(() -> mqttPoster.postPower(mqttReadings));
 if (DateUtils.diffInSeconds(new Date(), lastUpdateCheck) >= config.getUpdateInterval()) {
 lastUpdateCheck = new Date();
 monitor.submit(new UpdateChecker());
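
For context on the MonitorApp change above: instead of queueing one executor task per breaker reading, the whole list of readings is now handed to MqttPoster in a single task. A minimal sketch of that pattern with a plain ExecutorService (class and method names here are illustrative stand-ins, not the project's actual types):

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class BatchPostSketch {
    // Hypothetical stand-in for MqttPoster.postPower(List<BreakerPower>).
    static void postAll(List<String> readings) {
        for (String r : readings)
            System.out.println("publish " + r);
    }

    public static void main(String[] args) {
        ExecutorService monitor = Executors.newSingleThreadExecutor();
        List<String> readings = List.of("hub1_1", "hub1_2", "hub1_3");

        // Old shape: one queued task per reading.
        for (String r : readings)
            monitor.submit(() -> System.out.println("publish " + r));

        // New shape: the whole batch is handed off as a single task.
        monitor.submit(() -> postAll(readings));

        monitor.shutdown();
    }
}

One task per batch means all readings for a cycle are published back-to-back instead of interleaving with whatever else is queued on the executor.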
@@ -1,12 +1,18 @@
 package com.lanternsoftware.currentmonitor;
 
 import com.lanternsoftware.datamodel.currentmonitor.BreakerPower;
+import com.lanternsoftware.util.CollectionUtils;
 import com.lanternsoftware.util.NullUtils;
 import com.lanternsoftware.util.dao.DaoSerializer;
-import org.eclipse.paho.client.mqttv3.*;
+import org.eclipse.paho.client.mqttv3.IMqttClient;
+import org.eclipse.paho.client.mqttv3.MqttClient;
+import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
+import org.eclipse.paho.client.mqttv3.MqttMessage;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.List;
+
 public class MqttPoster {
 private static final Logger LOG = LoggerFactory.getLogger(MqttPoster.class);
 
@@ -15,6 +21,7 @@ public class MqttPoster {
 public MqttPoster(MonitorConfig _config) {
 IMqttClient c = null;
 try {
+LOG.info("Attempting to connect to MQTT broker at {}", _config.getMqttBrokerUrl());
 c = new MqttClient(_config.getMqttBrokerUrl(), String.format("Lantern_Power_Monitor_Hub_%d", _config.getHub()));
 MqttConnectOptions options = new MqttConnectOptions();
 options.setAutomaticReconnect(true);
@@ -25,21 +32,23 @@ public class MqttPoster {
 if (NullUtils.isNotEmpty(_config.getMqttPassword()))
 options.setPassword(_config.getMqttPassword().toCharArray());
 c.connect(options);
-} catch (MqttException e) {
+} catch (Exception e) {
 LOG.error("Failed to create MQTT client", e);
 }
 client = c;
 }
 
-public void postPower(BreakerPower _power) {
-String topic = "lantern_power_monitor/breaker_power/" + _power.getKey();
-MqttMessage msg = new MqttMessage(NullUtils.toByteArray(DaoSerializer.toJson(_power)));
+public void postPower(List<BreakerPower> _power) {
+for (BreakerPower power : CollectionUtils.makeNotNull(_power)) {
+String topic = "lantern_power_monitor/breaker_power/" + power.getKey();
+MqttMessage msg = new MqttMessage(NullUtils.toByteArray(DaoSerializer.toJson(power)));
 msg.setQos(2);
 msg.setRetained(true);
 try {
 client.publish(topic, msg);
-} catch (MqttException e) {
+} catch (Exception e) {
 LOG.error("Failed to publish message to {}", topic, e);
 }
+}
 }
 }
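
Since every reading is published retained at QoS 2 under lantern_power_monitor/breaker_power/<key>, any MQTT client subscribing to the wildcard topic immediately receives the latest value per breaker. A rough standalone Paho subscriber sketch; the broker URL and client id below are hypothetical, not taken from this repo:

import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;

public class BreakerPowerSubscriber {
    public static void main(String[] args) throws Exception {
        // Hypothetical broker URL and client id - substitute your own.
        MqttClient client = new MqttClient("tcp://localhost:1883", "breaker_power_listener");
        MqttConnectOptions options = new MqttConnectOptions();
        options.setAutomaticReconnect(true);
        client.connect(options);
        // Retained messages mean the latest reading per breaker arrives as soon as we subscribe.
        // The payload is the BreakerPower JSON produced by DaoSerializer.toJson in the poster above.
        client.subscribe("lantern_power_monitor/breaker_power/#", 2, (topic, msg) ->
                System.out.println(topic + " -> " + new String(msg.getPayload())));
    }
}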
@@ -20,18 +20,20 @@ public class BackupMinutes {
 CurrentMonitorDao backupDao = new MongoCurrentMonitorDao(MongoConfig.fromDisk(LanternFiles.BACKUP_PATH + "mongo.cfg"));
 Date now = new Date();
 for (Account a : dao.getProxy().queryAll(Account.class)) {
-if (a.getId() == 100)
+if (a.getId() == 0)
 continue;
 DebugTimer t = new DebugTimer("Account " + a.getId());
 if (NullUtils.isEmpty(a.getTimezone())) {
 a.setTimezone("America/Chicago");
 }
 TimeZone tz = TimeZone.getTimeZone(a.getTimezone());
-// Date start = DateUtils.addDays(DateUtils.getMidnightBeforeNow(tz), -2, tz);
 HubPowerMinute minute = dao.getProxy().queryOne(HubPowerMinute.class, new DaoQuery("account_id", a.getId()), DaoSort.sort("minute"));
 if (minute == null)
 continue;
+Date minStart = DateUtils.addDays(DateUtils.getMidnightBeforeNow(tz), -60, tz);
 Date start = DateUtils.getMidnightBefore(minute.getMinuteAsDate(), tz);
+if (minStart.after(start))
+start = minStart;
 Date end = DateUtils.addDays(start, 1, tz);
 while (end.before(now)) {
 DebugTimer t2 = new DebugTimer("Account Id: " + a.getId() + " Query Day " + DateUtils.format("MM/dd/yyyy", tz, start));
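
The new start-date logic above backfills from the midnight before the account's earliest stored HubPowerMinute, but clamps the window to at most 60 days back. A hedged sketch of the same windowing using java.time in place of the project's DateUtils helpers:

import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZonedDateTime;

public class BackupWindowSketch {
    // Start at the midnight before the earliest stored minute, but never more than 60 days back.
    static ZonedDateTime backupStart(Instant earliestMinute, ZoneId tz) {
        ZonedDateTime start = earliestMinute.atZone(tz).toLocalDate().atStartOfDay(tz);
        ZonedDateTime minStart = LocalDate.now(tz).atStartOfDay(tz).minusDays(60);
        return minStart.isAfter(start) ? minStart : start;
    }

    public static void main(String[] args) {
        ZoneId tz = ZoneId.of("America/Chicago");
        Instant earliest = Instant.now().minusSeconds(120L * 24 * 3600); // data starting ~120 days ago
        System.out.println(backupStart(earliest, tz)); // clamped to 60 days back
    }
}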
@@ -48,5 +50,6 @@ public class BackupMinutes {
 t.stop();
 }
 dao.shutdown();
+backupDao.shutdown();
 }
 }
@@ -22,6 +22,7 @@ public interface CurrentMonitorDao {
 List<BreakerPower> getBreakerPowerForAccount(int _accountId);
 BreakerPower getLatestBreakerPower(int _accountId, int _hub, int _port);
 BreakerGroupEnergy getBreakerGroupEnergy(int _accountId, String _groupId, EnergyBlockViewMode _viewMode, Date _start);
+byte[] getBreakerGroupEnergyBinary(int _accountId, String _groupId, EnergyBlockViewMode _viewMode, Date _start);
 void putBreakerGroupEnergy(BreakerGroupEnergy _energy);
 
 void putHubPowerMinute(HubPowerMinute _power);
@@ -125,6 +125,11 @@ public class MongoCurrentMonitorDao implements CurrentMonitorDao {
 return proxy.queryOne(BreakerGroupEnergy.class, new DaoQuery("_id", BreakerGroupEnergy.toId(_accountId, _groupId, _viewMode, _start)));
 }
 
+@Override
+public byte[] getBreakerGroupEnergyBinary(int _accountId, String _groupId, EnergyBlockViewMode _viewMode, Date _start) {
+return DaoSerializer.toZipBson(proxy.queryForEntity(BreakerGroupEnergy.class, new DaoQuery("_id", BreakerGroupEnergy.toId(_accountId, _groupId, _viewMode, _start))));
+}
+
 @Override
 public void updateSummaries(BreakerGroup _rootGroup, Set<Date> _daysToSummarize, TimeZone _tz) {
 Set<Date> monthsToSummarize = CollectionUtils.transformToSet(_daysToSummarize, _c -> DateUtils.getStartOfMonth(_c, _tz));
@@ -34,6 +34,6 @@ public class BomServlet extends LanternServlet {
 _rep.setStatus(401);
 return;
 }
-setResponseEntity(_rep, "text/csv",CSVWriter.toByteArray(BOM.fromConfig(config).toCsv(false)));
+setResponseEntity(_rep, "text/csv;charset=utf-8",CSVWriter.toByteArray(BOM.fromConfig(config).toCsv(false)));
 }
 }
@@ -1,15 +1,16 @@
 package com.lanternsoftware.currentmonitor.servlet;
 
 import com.lanternsoftware.currentmonitor.context.Globals;
-import com.lanternsoftware.util.dao.auth.AuthCode;
 import com.lanternsoftware.datamodel.currentmonitor.BreakerGroupEnergy;
 import com.lanternsoftware.datamodel.currentmonitor.EnergyBlockViewMode;
 import com.lanternsoftware.util.CollectionUtils;
 import com.lanternsoftware.util.NullUtils;
+import com.lanternsoftware.util.dao.auth.AuthCode;
 
 import javax.servlet.annotation.WebServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.MediaType;
 import java.util.Date;
 import java.util.List;
 
@@ -24,6 +25,13 @@ public class GroupEnergyServlet extends SecureServlet {
 }
 EnergyBlockViewMode viewMode = NullUtils.toEnum(EnergyBlockViewMode.class, path[1], EnergyBlockViewMode.DAY);
 Date start = new Date(NullUtils.toLong(path[2]));
+if ((CollectionUtils.size(_authCode.getAllAccountIds()) == 1) && NullUtils.isEqual(CollectionUtils.get(path, 3), "bin")) {
+byte[] energy = Globals.dao.getBreakerGroupEnergyBinary(CollectionUtils.getFirst(_authCode.getAllAccountIds()), path[0], viewMode, start);
+if (energy == null)
+_rep.setStatus(404);
+else
+setResponseEntity(_rep, 200, MediaType.APPLICATION_OCTET_STREAM, energy);
+}
 List<BreakerGroupEnergy> energies = CollectionUtils.transform(_authCode.getAllAccountIds(), _id->Globals.dao.getBreakerGroupEnergy(_id, path[0], viewMode, start), true);
 if (CollectionUtils.isNotEmpty(energies)) {
 BreakerGroupEnergy energy;
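
From the servlet code above, the request path is <groupId>/<viewMode>/<startMillis>, with an optional trailing "bin" segment (honored only for single-account auth codes) that returns the pre-zipped BSON from getBreakerGroupEnergyBinary instead of re-serializing a BreakerGroupEnergy object. A hypothetical client fetch with java.net.http; the host, servlet mapping, auth header name, and group id are assumptions, since the @WebServlet mapping and auth scheme are not shown in this diff:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class GroupEnergyBinaryClient {
    public static void main(String[] args) throws Exception {
        // Host, servlet mapping, auth header name, and group id are all assumptions for illustration.
        long startMillis = System.currentTimeMillis();
        URI uri = URI.create("https://example.com/energy/group/MyGroupId/DAY/" + startMillis + "/bin");
        HttpRequest req = HttpRequest.newBuilder(uri).header("auth_code", "<token>").build();
        HttpResponse<byte[]> rep = HttpClient.newHttpClient().send(req, HttpResponse.BodyHandlers.ofByteArray());
        // 200 => zipped BSON as produced by DaoSerializer.toZipBson; 404 => no summary for that start date.
        System.out.println(rep.statusCode() + ": " + rep.body().length + " bytes");
    }
}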
@@ -10,7 +10,7 @@ public abstract class CSVWriter {
 }
 
 public static String toString(CSV _csv) {
-StringBuilder out = new StringBuilder();
+StringBuilder out = new StringBuilder("\uFEFF");
 if (CollectionUtils.isNotEmpty(_csv.getHeaders())) {
 out.append(CollectionUtils.transformToCommaSeparated(_csv.getHeaders(), _h -> "\"" + _h + "\""));
 out.append("\r\n");
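
The "\uFEFF" prepended to the CSV output is the Unicode byte order mark; encoded as UTF-8 it becomes the three bytes EF BB BF at the start of the file, which is the hint spreadsheet tools like Excel use to open a CSV as UTF-8 instead of a legacy code page (hence the matching charset=utf-8 change in BomServlet above). A quick check:

import java.nio.charset.StandardCharsets;

public class BomCheck {
    public static void main(String[] args) {
        byte[] bom = "\uFEFF".getBytes(StandardCharsets.UTF_8);
        for (byte b : bom)
            System.out.printf("%02X ", b); // prints: EF BB BF
        System.out.println();
    }
}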
@@ -214,12 +214,7 @@ public class MongoProxy extends AbstractDaoProxy {
 iter.skip(_offset);
 if (_count > 0)
 iter.limit(_count);
-return CollectionUtils.transform(iter, new ITransformer<Document, DaoEntity>() {
-@Override
-public DaoEntity transform(Document _document) {
-return new DaoEntity(_document);
-}
-});
+return CollectionUtils.transform(iter, DaoEntity::new);
 }
 
 @Override
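
The MongoProxy change replaces an anonymous ITransformer with a constructor reference; both are simply a function from Document to DaoEntity, which works because DaoEntity has a Document constructor (visible in the DaoEntity hunk further down). A small equivalence sketch using java.util.function.Function as a stand-in for the project's ITransformer; the DaoEntity package below is an assumption based on the DaoSerializer import:

import java.util.function.Function;
import org.bson.Document;
// Assumed package for DaoEntity - it appears to live alongside DaoSerializer in the util.dao module.
import com.lanternsoftware.util.dao.DaoEntity;

public class TransformerSketch {
    public static void main(String[] args) {
        // Anonymous-class form, analogous to the removed ITransformer.
        Function<Document, DaoEntity> anon = new Function<Document, DaoEntity>() {
            @Override
            public DaoEntity apply(Document _document) {
                return new DaoEntity(_document);
            }
        };
        // Constructor-reference form used by the new code - same behavior, less ceremony.
        Function<Document, DaoEntity> ref = DaoEntity::new;

        Document doc = new Document("watts", 42);
        System.out.println(anon.apply(doc).size() + " == " + ref.apply(doc).size());
    }
}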
@@ -195,6 +195,26 @@ public abstract class AbstractDaoProxy implements IDaoProxy {
 return queryForEntities(_tableName, _query, _fields, _sort, 0, -1);
 }
 
+@Override
+public <T> DaoEntity queryForEntity(Class<T> _class, DaoQuery _query) {
+return queryForEntity(DaoSerializer.getTableName(_class, getType()), _query);
+}
+
+@Override
+public <T> DaoEntity queryForEntity(Class<T> _class, DaoQuery _query, DaoSort _sort) {
+return queryForEntity(DaoSerializer.getTableName(_class, getType()), _query, _sort);
+}
+
+@Override
+public <T> DaoEntity queryForEntity(Class<T> _class, DaoQuery _query, Collection<String> _fields) {
+return queryForEntity(DaoSerializer.getTableName(_class, getType()), _query, _fields);
+}
+
+@Override
+public <T> DaoEntity queryForEntity(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort) {
+return queryForEntity(DaoSerializer.getTableName(_class, getType()), _query, _fields, _sort);
+}
+
 @Override
 public DaoEntity queryForEntity(String _tableName, DaoQuery _query) {
 return CollectionUtils.getFirst(queryForEntities(_tableName, _query, null, null, 0, 1));
@@ -14,14 +14,12 @@ public class DaoEntity implements Map<String, Object> {
 }
 
 public DaoEntity(Document _doc) {
-map = _doc == null?new Document():_doc;
+map = (_doc == null) ? new Document() : _doc;
 }
 
 public DaoEntity(Map<String, ?> _map) {
 map = new Document();
-for (Entry<String, ?> e : _map.entrySet()) {
-map.put(e.getKey(), e.getValue());
-}
+map.putAll(_map);
 }
 
 public DaoEntity(String _name, Object _o) {
@@ -35,6 +35,10 @@ public interface IDaoProxy {
 <T> Future<List<T>> queryImportantAsync(Class<T> _class, DaoQuery _query, DaoSort _sort);
 <T> List<T> queryImportant(Class<T> _class, DaoQuery _query, DaoSort _sort, int _offset, int _count);
 <T> DaoPage<T> queryImportantPage(Class<T> _class, DaoQuery _query, DaoSort _sort, int _offset, int _count);
+<T> DaoEntity queryForEntity(Class<T> _class, DaoQuery _query);
+<T> DaoEntity queryForEntity(Class<T> _class, DaoQuery _query, DaoSort _sort);
+<T> DaoEntity queryForEntity(Class<T> _class, DaoQuery _query, Collection<String> _fields);
+<T> DaoEntity queryForEntity(Class<T> _class, DaoQuery _query, Collection<String> _fields, DaoSort _sort);
 DaoEntity queryForEntity(String _tableName, DaoQuery _query);
 DaoEntity queryForEntity(String _tableName, DaoQuery _query, DaoSort _sort);
 DaoEntity queryForEntity(String _tableName, DaoQuery _query, Collection<String> _fields);