Use of org.eclipse.kapua.service.datastore.internal.model.query.MetricInfoQueryImpl in project kapua by eclipse.
The class MessageStoreServiceImpl, method resetCache.
private void resetCache(String accountName, String topic) throws Exception {
    boolean isAnyAsset;
    boolean isAssetToDelete = false;
    String semTopic;
    if (topic != null) {
        // determine if we should delete an asset if topic = account/asset/#
        KapuaTopic kapuaTopic = new KapuaTopic(topic);
        isAnyAsset = kapuaTopic.isAnyAsset();
        semTopic = kapuaTopic.getSemanticTopic();
        if (semTopic.isEmpty() && !isAnyAsset)
            isAssetToDelete = true;
    } else {
        isAnyAsset = true;
        semTopic = "";
        isAssetToDelete = true;
    }
    // Find all topics
    String everyIndex = EsUtils.getAnyIndexName(accountName);
    int pageSize = 1000;
    int offset = 0;
    long totalHits = 1;
    MetricInfoQueryImpl metricQuery = new MetricInfoQueryImpl();
    metricQuery.setLimit(pageSize + 1);
    metricQuery.setOffset(offset);
    TopicMatchPredicateImpl topicPredicate = new TopicMatchPredicateImpl();
    topicPredicate.setExpression(topic);
    metricQuery.setPredicate(topicPredicate);
    // Remove metrics
    while (totalHits > 0) {
        MetricInfoListResult metrics = EsMetricDAO.connection(EsClient.getcurrent())
                .instance(everyIndex, EsSchema.METRIC_TYPE_NAME)
                .query(metricQuery);
        totalHits = metrics.size();
        LocalCache<String, Boolean> metricsCache = DatastoreCacheManager.getInstance().getMetricsCache();
        long toBeProcessed = totalHits > pageSize ? pageSize : totalHits;
        for (int i = 0; i < toBeProcessed; i++) {
            String id = metrics.get(i).getId().toString();
            if (metricsCache.get(id))
                metricsCache.remove(id);
        }
        if (totalHits > pageSize) {
            // advance to the next page and keep the query in sync with the new offset
            offset += pageSize;
            metricQuery.setOffset(offset);
        } else {
            // last page processed, exit the loop
            totalHits = 0;
        }
    }
    logger.debug(String.format("Removed cached topic metrics for [%s]", topic));
    EsMetricDAO.connection(EsClient.getcurrent())
            .instance(everyIndex, EsSchema.METRIC_TYPE_NAME)
            .deleteByQuery(metricQuery);
    logger.debug(String.format("Removed topic metrics for [%s]", topic));
    //
    // Remove topics
    offset = 0;
    totalHits = 1;
    TopicInfoQueryImpl topicQuery = new TopicInfoQueryImpl();
    topicQuery.setLimit(pageSize + 1);
    topicQuery.setOffset(offset);
    topicPredicate = new TopicMatchPredicateImpl();
    topicPredicate.setExpression(topic);
    topicQuery.setPredicate(topicPredicate);
    while (totalHits > 0) {
        TopicInfoListResult topics = EsTopicDAO.connection(EsClient.getcurrent())
                .instance(everyIndex, EsSchema.TOPIC_TYPE_NAME)
                .query(topicQuery);
        totalHits = topics.size();
        LocalCache<String, Boolean> topicsCache = DatastoreCacheManager.getInstance().getTopicsCache();
        long toBeProcessed = totalHits > pageSize ? pageSize : totalHits;
        for (int i = 0; i < toBeProcessed; i++) {
            String id = topics.get(i).getId().toString();
            if (topicsCache.get(id))
                topicsCache.remove(id);
        }
        if (totalHits > pageSize) {
            offset += pageSize;
            topicQuery.setOffset(offset);
        } else {
            totalHits = 0;
        }
    }
    logger.debug(String.format("Removed cached topics for [%s]", topic));
    EsTopicDAO.connection(EsClient.getcurrent())
            .instance(everyIndex, EsSchema.TOPIC_TYPE_NAME)
            .deleteByQuery(topicQuery);
    logger.debug(String.format("Removed topics for [%s]", topic));
    // Remove asset
    if (isAssetToDelete) {
        offset = 0;
        totalHits = 1;
        AssetInfoQueryImpl assetQuery = new AssetInfoQueryImpl();
        assetQuery.setLimit(pageSize + 1);
        assetQuery.setOffset(offset);
        topicPredicate = new TopicMatchPredicateImpl();
        topicPredicate.setExpression(topic);
        assetQuery.setPredicate(topicPredicate);
        while (totalHits > 0) {
            AssetInfoListResult assets = EsAssetDAO.connection(EsClient.getcurrent())
                    .instance(everyIndex, EsSchema.ASSET_TYPE_NAME)
                    .query(assetQuery);
            totalHits = assets.size();
            LocalCache<String, Boolean> assetsCache = DatastoreCacheManager.getInstance().getAssetsCache();
            long toBeProcessed = totalHits > pageSize ? pageSize : totalHits;
            for (int i = 0; i < toBeProcessed; i++) {
                String id = assets.get(i).getId().toString();
                if (assetsCache.get(id))
                    assetsCache.remove(id);
            }
            if (totalHits > pageSize) {
                offset += pageSize;
                assetQuery.setOffset(offset);
            } else {
                totalHits = 0;
            }
        }
        logger.debug(String.format("Removed cached assets for [%s]", topic));
        EsAssetDAO.connection(EsClient.getcurrent())
                .instance(everyIndex, EsSchema.ASSET_TYPE_NAME)
                .deleteByQuery(assetQuery);
        logger.debug(String.format("Removed assets for [%s]", topic));
    }
}
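The resetCache method follows the same pattern for metrics, topics and assets: page through the matching entries, evict each id from the corresponding LocalCache, then issue a deleteByQuery with the same predicate. A minimal sketch of that evict-then-delete step for metrics alone is shown below; it assumes the same imports and DAO setup as MessageStoreServiceImpl above, the helper name evictAndDeleteMetrics is hypothetical, and for brevity it fetches a single page instead of looping.

// Hypothetical helper, assuming the same imports and DAO setup as MessageStoreServiceImpl above.
// For brevity it fetches a single page instead of paging like resetCache does.
private void evictAndDeleteMetrics(String everyIndex, String topic) throws Exception {
    MetricInfoQueryImpl metricQuery = new MetricInfoQueryImpl();
    TopicMatchPredicateImpl topicPredicate = new TopicMatchPredicateImpl();
    topicPredicate.setExpression(topic);
    metricQuery.setPredicate(topicPredicate);
    metricQuery.setLimit(100);
    metricQuery.setOffset(0);

    // Evict any cached metric ids that match the topic expression
    MetricInfoListResult metrics = EsMetricDAO.connection(EsClient.getcurrent())
            .instance(everyIndex, EsSchema.METRIC_TYPE_NAME)
            .query(metricQuery);
    LocalCache<String, Boolean> metricsCache = DatastoreCacheManager.getInstance().getMetricsCache();
    for (int i = 0; i < metrics.size(); i++) {
        String id = metrics.get(i).getId().toString();
        if (metricsCache.get(id))
            metricsCache.remove(id);
    }

    // Then drop the metric documents themselves
    EsMetricDAO.connection(EsClient.getcurrent())
            .instance(everyIndex, EsSchema.METRIC_TYPE_NAME)
            .deleteByQuery(metricQuery);
}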
Use of org.eclipse.kapua.service.datastore.internal.model.query.MetricInfoQueryImpl in project kapua by eclipse.
The class MetricInfoStoreServiceImpl, method find.
@Override
public MetricInfo find(KapuaId scopeId, StorableId id) throws KapuaException {
    //
    // Argument Validation
    ArgumentValidator.notNull(scopeId, "scopeId");
    ArgumentValidator.notNull(id, "id");
    //
    // Check Access
    this.checkDataAccess(scopeId, Actions.read);

    MetricInfoQueryImpl q = new MetricInfoQueryImpl();
    q.setLimit(1);

    ArrayList<StorableId> ids = new ArrayList<StorableId>();
    ids.add(id);

    // Restrict the query to the requested storable id
    AndPredicateImpl allPredicates = new AndPredicateImpl();
    allPredicates.addPredicate(new IdsPredicateImpl(EsMessageField.ID, ids));
    q.setPredicate(allPredicates);

    MetricInfoListResult result = this.query(scopeId, q);
    if (result == null || result.size() == 0)
        return null;

    MetricInfo metricInfo = result.get(0);
    return metricInfo;
}
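The find above restricts the query to a single result and filters by id through an IdsPredicateImpl wrapped in an AndPredicateImpl. A caller would typically use it as in the hedged sketch below; metricInfoStoreService, scopeId and metricId are assumptions of this sketch, not names from the original code.

// Hypothetical caller; metricInfoStoreService, scopeId and metricId are assumptions of this sketch.
MetricInfo info = metricInfoStoreService.find(scopeId, metricId);
if (info == null) {
    logger.debug("No metric info found for id {}", metricId);
} else {
    logger.debug("Found metric info with id {}", info.getId().toString());
}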
Use of org.eclipse.kapua.service.datastore.internal.model.query.MetricInfoQueryImpl in project kapua by eclipse.
The class TopicInfoStoreServiceImpl, method delete(KapuaId, StorableId).
//
// @Override
// public StorableId store(KapuaId scopeId, TopicInfoCreator creator)
// throws KapuaException
// {
// // TODO DAOs are ready, need to evaluate if this functionality
// // has to be available or not. Currently entries are added by
// // the message service directly
// throw KapuaException.internalError("Not implemented");
// }
//
// @Override
// public StorableId update(KapuaId scopeId, TopicInfo creator)
// throws KapuaException
// {
// // TODO DAOs are ready, need to evaluate if this functionality
// // has to be available or not. Currently entries are added by
// // the message service directly
// throw KapuaException.internalError("Not implemented");
// }
@Override
public void delete(KapuaId scopeId, StorableId id) throws KapuaException {
    //
    // Argument Validation
    ArgumentValidator.notNull(scopeId, "scopeId");
    ArgumentValidator.notNull(id, "id");
    //
    // Check Access
    this.checkDataAccess(scopeId, Actions.delete);
    //
    // Do the find
    AccountInfo accountInfo = getAccountServicePlan(scopeId);
    String scopeName = accountInfo.getAccount().getName();
    LocalServicePlan accountServicePlan = accountInfo.getServicePlan();
    long ttl = accountServicePlan.getDataTimeToLive() * DAY_MILLIS;
    if (!accountServicePlan.getDataStorageEnabled() || ttl == LocalServicePlan.DISABLED) {
        logger.debug("Storage not enabled for account {}, skipping delete", scopeName);
        return;
    }
    try {
        String everyIndex = EsUtils.getAnyIndexName(scopeName);
        TopicInfo topicInfo = this.find(scopeId, id);

        // Delete the messages published under the topic
        MessageQueryImpl mqi = new MessageQueryImpl();
        TopicMatchPredicateImpl predicate = new TopicMatchPredicateImpl(topicInfo.getFullTopicName());
        mqi.setPredicate(predicate);
        EsMessageDAO.connection(EsClient.getcurrent())
                .instance(everyIndex, EsSchema.MESSAGE_TYPE_NAME)
                .setListener(null)
                .deleteByQuery(mqi);

        // Delete the metric entries for the same topic
        MetricInfoQueryImpl miqi = new MetricInfoQueryImpl();
        miqi.setPredicate(predicate);
        EsMetricDAO.connection(EsClient.getcurrent())
                .instance(everyIndex, EsSchema.METRIC_TYPE_NAME)
                .setListener(null)
                .deleteByQuery(miqi);

        // Finally delete the topic entry itself
        EsTopicDAO.connection(EsClient.getcurrent())
                .instance(everyIndex, EsSchema.TOPIC_TYPE_NAME)
                .deleteById(id.toString());
    } catch (Exception exc) {
        // CassandraUtils.handleException(e);
        throw KapuaException.internalError(exc);
    }
}
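Deleting a single topic entry cascades to the messages and metric entries for that topic by reusing the same TopicMatchPredicateImpl. The metric part of that cascade, taken in isolation, reduces to the sketch below; it assumes the same imports as TopicInfoStoreServiceImpl, with everyIndex and topicInfo coming from the enclosing method.

// Metric-only part of the cascade above; everyIndex and topicInfo are taken from the enclosing method.
MetricInfoQueryImpl miqi = new MetricInfoQueryImpl();
miqi.setPredicate(new TopicMatchPredicateImpl(topicInfo.getFullTopicName()));
EsMetricDAO.connection(EsClient.getcurrent())
        .instance(everyIndex, EsSchema.METRIC_TYPE_NAME)
        .setListener(null)
        .deleteByQuery(miqi);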
Use of org.eclipse.kapua.service.datastore.internal.model.query.MetricInfoQueryImpl in project kapua by eclipse.
The class EsMetricDAO, method query.
public MetricInfoListResult query(MetricInfoQuery query) throws Exception {
    // get one plus (if there is one) to later get the next key value
    MetricInfoQueryImpl localQuery = new MetricInfoQueryImpl();
    localQuery.copy(query);
    localQuery.setLimit(query.getLimit() + 1);

    MetricInfoQueryConverter mic = new MetricInfoQueryConverter();
    SearchRequestBuilder builder = mic.toSearchRequestBuilder(esTypeDAO.getIndexName(), esTypeDAO.getTypeName(), localQuery);
    SearchResponse response = builder.get(TimeValue.timeValueMillis(EsUtils.getQueryTimeout()));
    SearchHits searchHits = response.getHits();
    if (searchHits == null || searchHits.getTotalHits() == 0)
        return new MetricInfoListResultImpl();

    int i = 0;
    int searchHitsSize = searchHits.getHits().length;
    List<MetricInfo> metricInfos = new ArrayList<MetricInfo>();
    MetricInfoBuilder metricInfoBuilder = new MetricInfoBuilder();
    for (SearchHit searchHit : searchHits.getHits()) {
        if (i < query.getLimit()) {
            MetricInfo metricInfo = metricInfoBuilder.build(searchHit).getKapuaMetricInfo();
            metricInfos.add(metricInfo);
        }
        i++;
    }

    // TODO check equivalence with CX with Pierantonio
    // TODO what is this nextKey
    Object nextKey = null;
    if (searchHitsSize > query.getLimit()) {
        nextKey = query.getLimit();
    }

    MetricInfoListResult result = new MetricInfoListResultImpl(nextKey, metricInfos.size());
    result.addAll(metricInfos);
    return result;
}
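The DAO asks Elasticsearch for query.getLimit() + 1 hits so it can tell whether another page exists without issuing a second count request, but it never returns more than the requested limit to the caller. That check is small enough to isolate, as in the hypothetical helper below.

// Hypothetical helper illustrating the limit-plus-one check used in query(...) above.
static boolean hasNextPage(int requestedLimit, int returnedHits) {
    // query(...) requests requestedLimit + 1 hits; an extra hit only signals that more data exists
    return returnedHits > requestedLimit;
}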
Use of org.eclipse.kapua.service.datastore.internal.model.query.MetricInfoQueryImpl in project kapua by eclipse.
The class TopicInfoStoreServiceImpl, method delete(KapuaId, TopicInfoQuery).
@Override
public void delete(KapuaId scopeId, TopicInfoQuery query) throws KapuaException {
    //
    // Argument Validation
    ArgumentValidator.notNull(scopeId, "scopeId");
    ArgumentValidator.notNull(query, "query");
    //
    // Check Access
    this.checkDataAccess(scopeId, Actions.delete);
    //
    // Do the find
    AccountInfo accountInfo = getAccountServicePlan(scopeId);
    String scopeName = accountInfo.getAccount().getName();
    LocalServicePlan accountServicePlan = accountInfo.getServicePlan();
    long ttl = accountServicePlan.getDataTimeToLive() * DAY_MILLIS;
    if (!accountServicePlan.getDataStorageEnabled() || ttl == LocalServicePlan.DISABLED) {
        logger.debug("Storage not enabled for account {}, skipping delete", scopeName);
        return;
    }
    try {
        String everyIndex = EsUtils.getAnyIndexName(scopeName);
        TopicInfoListResult topics = this.query(scopeId, query);
        for (TopicInfo topicInfo : topics) {
            // TODO Improve performances

            // Delete the messages published under the topic
            MessageQueryImpl mqi = new MessageQueryImpl();
            TopicMatchPredicateImpl predicate = new TopicMatchPredicateImpl(topicInfo.getFullTopicName());
            mqi.setPredicate(predicate);
            EsMessageDAO.connection(EsClient.getcurrent())
                    .instance(everyIndex, EsSchema.MESSAGE_TYPE_NAME)
                    .setListener(null)
                    .deleteByQuery(mqi);

            // Delete the metric entries for the same topic
            MetricInfoQueryImpl miqi = new MetricInfoQueryImpl();
            miqi.setPredicate(predicate);
            EsMetricDAO.connection(EsClient.getcurrent())
                    .instance(everyIndex, EsSchema.METRIC_TYPE_NAME)
                    .setListener(null)
                    .deleteByQuery(miqi);
        }
        // Finally delete the matching topic entries
        EsTopicDAO.connection(EsClient.getcurrent())
                .instance(everyIndex, EsSchema.TOPIC_TYPE_NAME)
                .deleteByQuery(query);
    } catch (Exception exc) {
        // CassandraUtils.handleException(e);
        throw KapuaException.internalError(exc);
    }
}
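The query-based delete walks every matching topic and removes its messages and metric entries before dropping the topic entries themselves with a single deleteByQuery. A caller might drive it as in the hedged sketch below; topicInfoStoreService and scopeId are assumptions of this sketch, and the topic expression is only an example.

// Hypothetical caller; topicInfoStoreService, scopeId and the topic expression are assumptions of this sketch.
TopicInfoQueryImpl topicQuery = new TopicInfoQueryImpl();
topicQuery.setPredicate(new TopicMatchPredicateImpl("account/asset/#"));
topicInfoStoreService.delete(scopeId, topicQuery);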