Usage of org.openhab.core.persistence.HistoricItem in the openhab1-addons project (openhab):
the query method of the JpaPersistenceService class.
/**
 * Queries the JPA store for persisted states of a single item.
 *
 * Builds a JPQL query against {@code JpaPersistentItem}, filtered by item name and the
 * optional begin/end dates from the filter, ordered by timestamp, and paged via
 * {@link Query#setFirstResult}/{@link Query#setMaxResults}.
 *
 * @param filter the filter criteria (item name, date range, ordering, paging)
 * @return matching historic items, or an empty list when the service is not yet
 *         configured, the item is unknown, or the query fails
 */
@Override
public Iterable<HistoricItem> query(FilterCriteria filter) {
    logger.debug("Querying for historic item: {}", filter.getItemName());

    if (!JpaConfiguration.isInitialized) {
        logger.warn("Trying to create EntityManagerFactory but we don't have configuration yet!");
        return Collections.emptyList();
    }

    String itemName = filter.getItemName();
    Item item = getItemFromRegistry(itemName);
    if (item == null) {
        // Without the registry item we can neither resolve the persisted name nor
        // map stored values back to states, so bail out instead of NPE-ing below.
        logger.warn("Item '{}' not found in registry, returning empty result", itemName);
        return Collections.emptyList();
    }

    String sortOrder = (filter.getOrdering() == Ordering.ASCENDING) ? "ASC" : "DESC";

    boolean hasBeginDate = false;
    boolean hasEndDate = false;
    // Named parameters only — the item name and dates are never concatenated into JPQL.
    String queryString = "SELECT n FROM " + JpaPersistentItem.class.getSimpleName()
            + " n WHERE n.realName = :itemName";
    if (filter.getBeginDate() != null) {
        queryString += " AND n.timestamp >= :beginDate";
        hasBeginDate = true;
    }
    if (filter.getEndDate() != null) {
        queryString += " AND n.timestamp <= :endDate";
        hasEndDate = true;
    }
    queryString += " ORDER BY n.timestamp " + sortOrder;
    logger.debug("The query: {}", queryString);

    EntityManager em = getEntityManagerFactory().createEntityManager();
    try {
        // In RESOURCE_LOCAL calls to EntityManager require a begin/commit
        em.getTransaction().begin();

        logger.debug("Creating query...");
        Query query = em.createQuery(queryString);
        query.setParameter("itemName", item.getName());
        if (hasBeginDate) {
            query.setParameter("beginDate", filter.getBeginDate());
        }
        if (hasEndDate) {
            query.setParameter("endDate", filter.getEndDate());
        }
        query.setFirstResult(filter.getPageNumber() * filter.getPageSize());
        query.setMaxResults(filter.getPageSize());
        logger.debug("Creating query...done");

        logger.debug("Retrieving result list...");
        @SuppressWarnings("unchecked")
        List<JpaPersistentItem> result = query.getResultList();
        logger.debug("Retrieving result list...done");

        List<HistoricItem> historicList = JpaHistoricItem.fromResultList(result, item);
        if (historicList != null) {
            logger.debug("Convert to HistoricItem: {}", historicList.size());
        }

        em.getTransaction().commit();
        return historicList;
    } catch (Exception e) {
        logger.error("Error on querying database!");
        logger.error(e.getMessage(), e);
        em.getTransaction().rollback();
    } finally {
        em.close();
    }
    return Collections.emptyList();
}
Usage of org.openhab.core.persistence.HistoricItem in the openhab1-addons project (openhab):
the query method of the InfluxDBPersistenceService class (InfluxDB 0.9+ API).
/**
 * Queries InfluxDB (0.9+ API) for persisted states matching the given filter.
 *
 * Builds an InfluxQL SELECT over the configured retention policy, constrained by the
 * optional state/operator and begin/end dates, then converts the returned series rows
 * into {@link InfluxdbItem}s. Paging is emulated: the query is limited to
 * {@code (pageNumber + 1) * pageSize} rows and the rows belonging to earlier pages are
 * skipped client-side (InfluxQL of this vintage has no OFFSET).
 *
 * @param filter the filter criteria (item name, state/operator, date range, ordering, paging)
 * @return matching historic items, or an empty list when not configured/connected
 */
@Override
public Iterable<HistoricItem> query(FilterCriteria filter) {
    logger.debug("got a query");
    if (!isProperlyConfigured) {
        logger.warn("Configuration for influxdb not yet loaded or broken.");
        return Collections.emptyList();
    }
    if (!isConnected()) {
        logger.warn("InfluxDB is not yet connected");
        return Collections.emptyList();
    }

    List<HistoricItem> historicItems = new ArrayList<HistoricItem>();

    // StringBuilder: single-threaded local use, no need for StringBuffer's synchronization.
    StringBuilder query = new StringBuilder();
    query.append("select ").append(VALUE_COLUMN_NAME).append(' ').append("from \"").append(retentionPolicy)
            .append("\".");
    if (filter.getItemName() != null) {
        query.append('"').append(filter.getItemName()).append('"');
    } else {
        // No item name: match every measurement.
        query.append("/.*/");
    }

    logger.trace(
            "Filter: itemname: {}, ordering: {}, state: {}, operator: {}, getBeginDate: {}, getEndDate: {}, getPageSize: {}, getPageNumber: {}",
            filter.getItemName(), filter.getOrdering().toString(), filter.getState(), filter.getOperator(),
            filter.getBeginDate(), filter.getEndDate(), filter.getPageSize(), filter.getPageNumber());

    if ((filter.getState() != null && filter.getOperator() != null) || filter.getBeginDate() != null
            || filter.getEndDate() != null) {
        query.append(" where ");
        boolean foundState = false;
        boolean foundBeginDate = false;
        if (filter.getState() != null && filter.getOperator() != null) {
            String value = stateToString(filter.getState());
            if (value != null) {
                foundState = true;
                query.append(VALUE_COLUMN_NAME);
                query.append(" ");
                query.append(filter.getOperator().toString());
                query.append(" ");
                query.append(value);
            }
        }
        if (filter.getBeginDate() != null) {
            foundBeginDate = true;
            if (foundState) {
                query.append(" and");
            }
            query.append(" ");
            query.append(TIME_COLUMN_NAME);
            query.append(" > ");
            query.append(getTimeFilter(filter.getBeginDate()));
            query.append(" ");
        }
        if (filter.getEndDate() != null) {
            if (foundState || foundBeginDate) {
                query.append(" and");
            }
            query.append(" ");
            query.append(TIME_COLUMN_NAME);
            query.append(" < ");
            query.append(getTimeFilter(filter.getEndDate()));
            query.append(" ");
        }
    }

    if (filter.getOrdering() == Ordering.DESCENDING) {
        query.append(String.format(" ORDER BY %s DESC", TIME_COLUMN_NAME));
        logger.debug("descending ordering ");
    }

    // Fetch everything up to the end of the requested page; earlier pages are dropped below.
    int limit = (filter.getPageNumber() + 1) * filter.getPageSize();
    query.append(" limit " + limit);
    logger.trace("appending limit {}", limit);

    // Index of the first row belonging to the requested page (0-based).
    int startEntryNum = filter.getPageSize() * filter.getPageNumber();
    logger.trace("startEntryNum {}", startEntryNum);

    logger.debug("query string: {}", query.toString());
    Query influxdbQuery = new Query(query.toString(), dbName);

    List<Result> results = Collections.emptyList();
    results = influxDB.query(influxdbQuery, timeUnit).getResults();
    for (Result result : results) {
        List<Series> seriess = result.getSeries();
        if (result.getError() != null) {
            logger.error(result.getError());
            continue;
        }
        if (seriess == null) {
            logger.debug("query returned no series");
        } else {
            for (Series series : seriess) {
                logger.trace("series {}", series.toString());
                String historicItemName = series.getName();
                List<List<Object>> valuess = series.getValues();
                if (valuess == null) {
                    logger.debug("query returned no values");
                } else {
                    List<String> columns = series.getColumns();
                    logger.trace("columns {}", columns);
                    // Locate the time and value columns by name; order is not guaranteed.
                    Integer timestampColumn = null;
                    Integer valueColumn = null;
                    for (int i = 0; i < columns.size(); i++) {
                        String columnName = columns.get(i);
                        if (columnName.equals(TIME_COLUMN_NAME)) {
                            timestampColumn = i;
                        } else if (columnName.equals(VALUE_COLUMN_NAME)) {
                            valueColumn = i;
                        }
                    }
                    if (valueColumn == null || timestampColumn == null) {
                        throw new RuntimeException("missing column");
                    }
                    for (int i = 0; i < valuess.size(); i++) {
                        if (i < startEntryNum) {
                            // Row belongs to an earlier page; skip it so page N
                            // does not repeat the rows of pages 0..N-1.
                            logger.trace("omitting item value for {}", historicItemName);
                            continue;
                        }
                        Double rawTime = (Double) valuess.get(i).get(timestampColumn);
                        Date time = new Date(rawTime.longValue());
                        State value = objectToState(valuess.get(i).get(valueColumn), historicItemName);
                        logger.trace("adding historic item {}: time {} value {}", historicItemName, time, value);
                        historicItems.add(new InfluxdbItem(historicItemName, value, time));
                    }
                }
            }
        }
    }
    return historicItems;
}
Usage of org.openhab.core.persistence.HistoricItem in the openhab1-addons project (openhab):
the query method of the InfluxDBPersistenceService class (InfluxDB 0.8 API).
/**
 * Queries InfluxDB (0.8 API) for persisted states matching the given filter.
 *
 * Builds an InfluxQL SELECT constrained by the optional state/operator and begin/end
 * dates, then converts returned series rows into {@link InfluxdbItem}s. Paging is
 * emulated: the query is limited to {@code (pageNumber + 1) * pageSize} rows and rows
 * belonging to earlier pages are skipped client-side.
 *
 * @param filter the filter criteria (item name, state/operator, date range, ordering, paging)
 * @return matching historic items, or an empty list when not configured/connected or on error
 */
@Override
public Iterable<HistoricItem> query(FilterCriteria filter) {
    logger.debug("got a query");
    if (!isProperlyConfigured) {
        logger.warn("Configuration for influxdb08 not yet loaded or broken.");
        return Collections.emptyList();
    }
    if (!isConnected()) {
        logger.warn("InfluxDB is not yet connected");
        return Collections.emptyList();
    }

    List<HistoricItem> historicItems = new ArrayList<HistoricItem>();

    // StringBuilder: single-threaded local use, no need for StringBuffer's synchronization.
    StringBuilder query = new StringBuilder();
    query.append("select ");
    query.append(VALUE_COLUMN_NAME);
    query.append(", ");
    query.append(TIME_COLUMN_NAME);
    query.append(" ");
    query.append("from ");
    if (filter.getItemName() != null) {
        query.append(filter.getItemName());
    } else {
        // No item name: match every series.
        query.append("/.*/");
    }

    logger.trace("filter itemname: {}", filter.getItemName());
    logger.trace("filter ordering: {}", filter.getOrdering().toString());
    logger.trace("filter state: {}", filter.getState());
    logger.trace("filter operator: {}", filter.getOperator());
    logger.trace("filter getBeginDate: {}", filter.getBeginDate());
    logger.trace("filter getEndDate: {}", filter.getEndDate());
    logger.trace("filter getPageSize: {}", filter.getPageSize());
    logger.trace("filter getPageNumber: {}", filter.getPageNumber());

    if ((filter.getState() != null && filter.getOperator() != null) || filter.getBeginDate() != null
            || filter.getEndDate() != null) {
        query.append(" where ");
        boolean foundState = false;
        boolean foundBeginDate = false;
        if (filter.getState() != null && filter.getOperator() != null) {
            String value = stateToString(filter.getState());
            if (value != null) {
                foundState = true;
                query.append(VALUE_COLUMN_NAME);
                query.append(" ");
                query.append(filter.getOperator().toString());
                query.append(" ");
                query.append(value);
            }
        }
        if (filter.getBeginDate() != null) {
            foundBeginDate = true;
            if (foundState) {
                query.append(" and");
            }
            query.append(" ");
            query.append(TIME_COLUMN_NAME);
            query.append(" > ");
            query.append(getTimeFilter(filter.getBeginDate()));
            query.append(" ");
        }
        if (filter.getEndDate() != null) {
            if (foundState || foundBeginDate) {
                query.append(" and");
            }
            query.append(" ");
            query.append(TIME_COLUMN_NAME);
            query.append(" < ");
            query.append(getTimeFilter(filter.getEndDate()));
            query.append(" ");
        }
    }

    // http://influxdb.com/docs/v0.7/api/query_language.html#select-and-time-ranges
    if (filter.getOrdering() == Ordering.ASCENDING) {
        query.append(" order asc");
    }

    // Fetch everything up to the end of the requested page; earlier pages are dropped below.
    int limit = (filter.getPageNumber() + 1) * filter.getPageSize();
    query.append(" limit " + limit);
    logger.trace("appending limit {}", limit);

    // Number of rows belonging to earlier pages (to be skipped).
    int startEntryNum = filter.getPageSize() * filter.getPageNumber();
    logger.trace("startEntryNum {}", startEntryNum);

    logger.debug("query string: {}", query.toString());
    List<Serie> results = Collections.emptyList();
    try {
        results = influxDB.query(dbName, query.toString(), TimeUnit.MILLISECONDS);
    } catch (RuntimeException e) {
        logger.error("query failed with database error");
        handleDatabaseException(e);
    }
    for (Serie result : results) {
        String historicItemName = result.getName();
        logger.trace("item name {}", historicItemName);
        int entryCount = 0;
        for (Map<String, Object> row : result.getRows()) {
            entryCount++;
            // entryCount is 1-based, so strictly-greater skips exactly the first
            // startEntryNum rows; '>=' would repeat the last row of the previous page.
            if (entryCount > startEntryNum) {
                Double rawTime = (Double) row.get(TIME_COLUMN_NAME);
                Object rawValue = row.get(VALUE_COLUMN_NAME);
                logger.trace("adding historic item {}: time {} value {}", historicItemName, rawTime, rawValue);
                Date time = new Date(rawTime.longValue());
                State value = objectToState(rawValue, historicItemName);
                historicItems.add(new InfluxdbItem(historicItemName, value, time));
            } else {
                logger.trace("omitting item value for {}", historicItemName);
            }
        }
    }
    return historicItems;
}
Usage of org.openhab.core.persistence.HistoricItem in the openhab1-addons project (openhab):
the findLastOn method of the CaldavPersistenceService class.
/**
 * Finds the most recently persisted item for the given alias, provided its state is
 * not "off".
 *
 * Runs a descending, single-entry query up to the current time and inspects the first
 * (newest) result. Note the {@code state} parameter is not consulted by this lookup.
 *
 * @param alias the item name to query for
 * @param state unused here; kept for the caller's signature
 * @return the latest {@link CaldavItem} whose state is not off, or {@code null}
 */
private CaldavItem findLastOn(String alias, State state) {
    final FilterCriteria criteria = new FilterCriteria();
    criteria.setItemName(alias);
    criteria.setEndDate(new Date());
    criteria.setOrdering(FilterCriteria.Ordering.DESCENDING);
    criteria.setPageSize(1);

    final Iterator<HistoricItem> results = this.query(criteria).iterator();
    if (!results.hasNext()) {
        return null;
    }
    final CaldavItem latest = (CaldavItem) results.next();
    return isOff(latest.getState()) ? null : latest;
}
Usage of org.openhab.core.persistence.HistoricItem in the openhab1-addons project (openhab):
the query method of the CaldavPersistenceService class.
/**
 * Queries the CalDAV calendar for historic item states matching the given filter.
 *
 * Loads all events of the configured calendar, discards those entirely outside the
 * begin/end window, parses each remaining event into timestamped state entries, applies
 * the filter's state/operator condition, sorts by timestamp per the requested ordering,
 * and finally applies paging.
 *
 * @param filter the filter criteria (item name, state/operator, date range, ordering, paging)
 * @return matching historic items (possibly empty), never {@code null}
 */
@Override
public Iterable<HistoricItem> query(final FilterCriteria filter) {
    List<CalDavEvent> events = calDavLoader.getEvents(new CalDavQuery(calendarId));
    List<HistoricItem> outList = new ArrayList<HistoricItem>();
    for (CalDavEvent calDavEvent : events) {
        // Skip events that end before the window starts or start after it ends.
        if (filter.getBeginDate() != null && calDavEvent.getEnd().toDate().before(filter.getBeginDate())) {
            continue;
        }
        if (filter.getEndDate() != null && calDavEvent.getStart().toDate().after(filter.getEndDate())) {
            continue;
        }
        Item item = null;
        try {
            item = this.itemRegistry.getItem(filter.getItemName());
        } catch (ItemNotFoundException e) {
            logger.error("item {} could not be found", filter.getItemName());
            continue;
        }
        final List<EventUtils.EventContent> parseContent = EventUtils.parseContent(calDavEvent, item);
        for (EventUtils.EventContent eventContent : parseContent) {
            // Individual entries of an in-window event may still fall outside the window.
            if (filter.getBeginDate() != null && eventContent.getTime().toDate().before(filter.getBeginDate())) {
                continue;
            }
            if (filter.getEndDate() != null && eventContent.getTime().toDate().after(filter.getEndDate())) {
                continue;
            }
            final State eventState = eventContent.getState();
            if (!matchesStateFilter(filter, eventState)) {
                continue;
            }
            // just filtered events are here...
            final CaldavItem caldavItem = new CaldavItem(filter.getItemName(), eventState,
                    eventContent.getTime().toDate());
            caldavItem.setEvent(calDavEvent);
            outList.add(caldavItem);
        }
    }
    Collections.sort(outList, new Comparator<HistoricItem>() {
        @Override
        public int compare(HistoricItem arg0, HistoricItem arg1) {
            // Long.compare avoids the int-truncation/overflow of subtracting epoch millis.
            if (filter.getOrdering().equals(FilterCriteria.Ordering.ASCENDING)) {
                return Long.compare(arg0.getTimestamp().getTime(), arg1.getTimestamp().getTime());
            } else {
                return Long.compare(arg1.getTimestamp().getTime(), arg0.getTimestamp().getTime());
            }
        }
    });
    // Paging: drop everything before the requested page, cap at one page size.
    final int fromIndex = filter.getPageNumber() * filter.getPageSize();
    if (outList.size() < fromIndex) {
        return Collections.emptyList();
    }
    outList = outList.subList(fromIndex, Math.min(fromIndex + filter.getPageSize(), outList.size()));
    logger.trace("result size for query: {}", outList.size());
    return outList;
}

/**
 * Returns whether {@code eventState} satisfies the filter's state/operator condition.
 * A filter with no state or no operator matches everything. Relational operators only
 * match when both states are {@link DecimalType}.
 */
private boolean matchesStateFilter(FilterCriteria filter, State eventState) {
    final State filterState = filter.getState();
    if (filterState == null || filter.getOperator() == null) {
        return true;
    }
    final boolean bothDecimal = eventState instanceof DecimalType && filterState instanceof DecimalType;
    // NOTE(review): relational comparisons use longValue(), so fractional parts are
    // ignored (e.g. 1.5 and 1.4 compare equal) — preserved from the original; confirm intended.
    switch (filter.getOperator()) {
        case EQ:
            return filterState.equals(eventState);
        case NEQ:
            return !filterState.equals(eventState);
        case LTE:
            return bothDecimal
                    && ((DecimalType) eventState).longValue() <= ((DecimalType) filterState).longValue();
        case GTE:
            return bothDecimal
                    && ((DecimalType) eventState).longValue() >= ((DecimalType) filterState).longValue();
        case LT:
            return bothDecimal
                    && ((DecimalType) eventState).longValue() < ((DecimalType) filterState).longValue();
        case GT:
            return bothDecimal
                    && ((DecimalType) eventState).longValue() > ((DecimalType) filterState).longValue();
        default:
            // Unknown operator: original switch fell through and kept the entry.
            return true;
    }
}
Aggregations