Use of io.siddhi.core.table.CacheTable in project siddhi by wso2.
The class AbstractQueryableRecordTable, method find.
@Override
public StreamEvent find(CompiledCondition compiledCondition, StateEvent matchingEvent)
        throws ConnectionUnavailableException {
    try {
        updateStoreTableSize();
    } catch (ConnectionUnavailableException e) {
        log.error(e);
    }
    // handle compiled condition type conversion
    RecordStoreCompiledCondition recordStoreCompiledCondition;
    CompiledConditionWithCache compiledConditionWithCache = null;
    findMatchingEvent = matchingEvent;
    if (cacheEnabled) {
        RecordStoreCompiledCondition compiledConditionTemp = (RecordStoreCompiledCondition) compiledCondition;
        compiledConditionWithCache = (CompiledConditionWithCache) compiledConditionTemp.getCompiledCondition();
        recordStoreCompiledCondition = new RecordStoreCompiledCondition(
                compiledConditionTemp.variableExpressionExecutorMap,
                compiledConditionWithCache.getStoreCompileCondition(),
                compiledConditionTemp.getSiddhiQueryContext());
    } else {
        recordStoreCompiledCondition = (RecordStoreCompiledCondition) compiledCondition;
    }
    StreamEvent cacheResults;
    if (cacheEnabled) {
        readWriteLock.writeLock().lock();
        try {
            // when the store table is smaller than the max cache size, serve results from the cache
            if (storeTableSize <= maxCacheSize) {
                if (log.isDebugEnabled()) {
                    log.debug(siddhiAppContext.getName() + "-"
                            + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                            + ": store table size is smaller than max cache. Sending results from cache");
                }
                cacheResults = cacheTable.find(compiledConditionWithCache.getCacheCompileCondition(),
                        matchingEvent);
                return cacheResults;
            } else {
                if (log.isDebugEnabled()) {
                    log.debug(siddhiAppContext.getName() + "-"
                            + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                            + ": store table size is bigger than cache.");
                }
                if (compiledConditionWithCache.isRouteToCache()) {
                    if (log.isDebugEnabled()) {
                        log.debug(siddhiAppContext.getName() + "-"
                                + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                                + ": cache constraints satisfied. Checking cache");
                    }
                    cacheResults = cacheTable.find(compiledConditionWithCache.getCacheCompileCondition(),
                            matchingEvent);
                    if (cacheResults != null) {
                        if (log.isDebugEnabled()) {
                            log.debug(siddhiAppContext.getName() + "-"
                                    + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                                    + ": cache hit. Sending results from cache");
                        }
                        return cacheResults;
                    }
                    // cache miss: load the entry from the store and add it to the cache
                    if (log.isDebugEnabled()) {
                        log.debug(siddhiAppContext.getName() + "-"
                                + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                                + ": cache miss. Loading from store");
                    }
                    StreamEvent streamEvent = super.find(recordStoreCompiledCondition, matchingEvent);
                    if (streamEvent == null) {
                        if (log.isDebugEnabled()) {
                            log.debug(siddhiAppContext.getName() + "-"
                                    + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                                    + ": store also miss. sending null");
                        }
                        return null;
                    }
                    // if the cache is full, evict one entry per the cache policy before adding
                    if (cacheTable.size() == maxCacheSize) {
                        ((CacheTable) cacheTable).deleteOneEntryUsingCachePolicy();
                    }
                    ((CacheTable) cacheTable).addStreamEventUptoMaxSize(streamEvent);
                    cacheResults = cacheTable.find(compiledConditionWithCache.getCacheCompileCondition(),
                            matchingEvent);
                    if (log.isDebugEnabled()) {
                        log.debug(siddhiAppContext.getName() + "-"
                                + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                                + ": sending results from cache after loading from store");
                    }
                    return cacheResults;
                }
            }
        } finally {
            readWriteLock.writeLock().unlock();
        }
    }
    // when the cache is not enabled, or the cache routing conditions are not satisfied, query the store
    if (log.isDebugEnabled()) {
        log.debug(siddhiAppContext.getName() + "-"
                + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                + ": sending results from store");
    }
    return super.find(recordStoreCompiledCondition, matchingEvent);
}
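For context, this find path is what a table lookup from a Siddhi query (for example, a stream-to-table join) runs through at runtime. Below is a minimal sketch of a Siddhi app that exercises it; the datasource name, schemas, and the nested @cache element names are illustrative assumptions, not taken from this file:

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;

public class CacheFindExample {

    public static void main(String[] args) throws InterruptedException {
        // '@cache' nested inside '@store' enables the CacheTable path above;
        // 'STOCK_DB' is a hypothetical datasource name.
        String app = "define stream CheckStream (symbol string); "
                + "@store(type='rdbms', datasource='STOCK_DB', "
                + "    @cache(size='10', cache.policy='LRU', "
                + "           retention.period='5 min', purge.interval='1 min')) "
                + "define table StockTable (symbol string, price float, volume long); "
                + "@info(name='joinQuery') "
                + "from CheckStream join StockTable on CheckStream.symbol == StockTable.symbol "
                + "select CheckStream.symbol, StockTable.price insert into OutStream;";
        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
        // each event sent to CheckStream drives the join's table lookup,
        // which goes through AbstractQueryableRecordTable.find(...)
        runtime.getInputHandler("CheckStream").send(new Object[]{"WSO2"});
        Thread.sleep(100);
        runtime.shutdown();
        siddhiManager.shutdown();
    }
}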
Use of io.siddhi.core.table.CacheTable in project siddhi by wso2.
The class AbstractQueryableRecordTable, method initCache.
@Override
public void initCache(TableDefinition tableDefinition, SiddhiAppContext siddhiAppContext,
                      StreamEventCloner storeEventCloner, ConfigReader configReader) {
    String[] annotationNames = {ANNOTATION_STORE, ANNOTATION_CACHE};
    Annotation cacheTableAnnotation = getAnnotation(annotationNames, tableDefinition.getAnnotations());
    if (cacheTableAnnotation != null) {
        cacheEnabled = true;
        maxCacheSize = Integer.parseInt(cacheTableAnnotation.getElement(CACHE_TABLE_SIZE));
        // build a cache table definition mirroring the store table, minus the @store annotation
        TableDefinition cacheTableDefinition = TableDefinition.id(tableDefinition.getId());
        for (Attribute attribute : tableDefinition.getAttributeList()) {
            cacheTableDefinition.attribute(attribute.getName(), attribute.getType());
        }
        for (Annotation annotation : tableDefinition.getAnnotations()) {
            if (!annotation.getName().equalsIgnoreCase("Store")) {
                cacheTableDefinition.annotation(annotation);
            }
        }
        // pick the cache implementation from the configured policy, defaulting to FIFO
        String cachePolicy = cacheTableAnnotation.getElement(ANNOTATION_CACHE_POLICY);
        if (cachePolicy == null || cachePolicy.equalsIgnoreCase("FIFO")) {
            cachePolicy = "FIFO";
            cacheTable = new CacheTableFIFO();
        } else if (cachePolicy.equalsIgnoreCase("LRU")) {
            cacheTable = new CacheTableLRU();
        } else if (cachePolicy.equalsIgnoreCase("LFU")) {
            cacheTable = new CacheTableLFU();
        } else {
            throw new SiddhiAppCreationException(siddhiAppContext.getName()
                    + " : Cache policy can only be one of FIFO, LRU, and LFU but given as " + cachePolicy);
        }
        // check if cache expiry is enabled and initialize the relevant parameters
        if (cacheTableAnnotation.getElement(ANNOTATION_CACHE_RETENTION_PERIOD) != null) {
            cacheExpiryEnabled = true;
            retentionPeriod = Expression.Time.timeToLong(
                    cacheTableAnnotation.getElement(ANNOTATION_CACHE_RETENTION_PERIOD));
            if (cacheTableAnnotation.getElement(ANNOTATION_CACHE_PURGE_INTERVAL) == null) {
                purgeInterval = retentionPeriod;
            } else {
                purgeInterval = Expression.Time.timeToLong(
                        cacheTableAnnotation.getElement(ANNOTATION_CACHE_PURGE_INTERVAL));
            }
            storeSizeCheckInterval = purgeInterval * 5;
        } else {
            storeSizeCheckInterval = 10000;
        }
        ((CacheTable) cacheTable).initCacheTable(cacheTableDefinition, configReader, siddhiAppContext,
                recordTableHandler, cacheExpiryEnabled, maxCacheSize, cachePolicy);
        // create the objects needed to load the cache
        SiddhiQueryContext siddhiQueryContext = new SiddhiQueryContext(siddhiAppContext,
                CACHE_QUERY_NAME + tableDefinition.getId());
        MatchingMetaInfoHolder matchingMetaInfoHolder =
                generateMatchingMetaInfoHolderForCacheTable(tableDefinition);
        OnDemandQuery onDemandQuery = OnDemandQuery.query()
                .from(InputStore.store(tableDefinition.getId()))
                .select(Selector.selector().limit(Expression.value(maxCacheSize + 1)));
        List<VariableExpressionExecutor> variableExpressionExecutors = new ArrayList<>();
        compiledConditionForCaching = compileCondition(Expression.value(true), matchingMetaInfoHolder,
                variableExpressionExecutors, tableMap, siddhiQueryContext);
        List<Attribute> expectedOutputAttributes = buildExpectedOutputAttributes(onDemandQuery, tableMap,
                SiddhiConstants.UNKNOWN_STATE, matchingMetaInfoHolder, siddhiQueryContext);
        compiledSelectionForCaching = compileSelection(onDemandQuery.getSelector(), expectedOutputAttributes,
                matchingMetaInfoHolder, variableExpressionExecutors, tableMap, siddhiQueryContext);
        outputAttributesForCaching = expectedOutputAttributes.toArray(new Attribute[0]);
        QueryParserHelper.reduceMetaComplexEvent(matchingMetaInfoHolder.getMetaStateEvent());
        QueryParserHelper.updateVariablePosition(matchingMetaInfoHolder.getMetaStateEvent(),
                variableExpressionExecutors);
        compiledSelectionForSelectAll = generateCSForSelectAll();
    }
}
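For reference, the annotation elements read above map onto the cache configuration as follows. This is an illustrative sketch: the element names ('size', 'cache.policy', 'retention.period', 'purge.interval') are assumed values of the constants CACHE_TABLE_SIZE, ANNOTATION_CACHE_POLICY, ANNOTATION_CACHE_RETENTION_PERIOD, and ANNOTATION_CACHE_PURGE_INTERVAL, and the store configuration is hypothetical:

// Illustrative table definition and the fields initCache derives from it.
String cachedTable =
        "@store(type='rdbms', datasource='STOCK_DB', "   // hypothetical store config
        + "@cache(size='100', "                          // -> maxCacheSize = 100
        + "cache.policy='LFU', "                         // -> cacheTable = new CacheTableLFU()
        + "retention.period='10 min', "                  // -> cacheExpiryEnabled = true; retentionPeriod = 600000 ms
        + "purge.interval='1 min')) "                    // -> purgeInterval = 60000 ms; storeSizeCheckInterval = 300000 ms
        + "define table StockTable (symbol string, price float, volume long);";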
Use of io.siddhi.core.table.CacheTable in project siddhi by wso2.
The class AbstractQueryableRecordTable, method connectAndLoadCache.
@Override
protected void connectAndLoadCache() throws ConnectionUnavailableException {
    connect();
    if (cacheEnabled) {
        // rebuild the cache from scratch: clear it, then preload rows from the store
        ((CacheTable) cacheTable).deleteAll();
        StateEvent stateEventForCaching = new StateEvent(1, 0);
        StreamEvent preLoadedData;
        queryStoreWithoutCheckingCache.set(Boolean.TRUE);
        try {
            preLoadedData = query(stateEventForCaching, compiledConditionForCaching,
                    compiledSelectionForCaching, outputAttributesForCaching);
        } finally {
            queryStoreWithoutCheckingCache.set(Boolean.FALSE);
        }
        if (preLoadedData != null) {
            ((CacheTable) cacheTable).addStreamEventUptoMaxSize(preLoadedData);
        }
        if (cacheExpiryEnabled) {
            siddhiAppContext.getScheduledExecutorService().scheduleAtFixedRate(
                    new CacheExpirer(retentionPeriod, cacheTable, tableMap, this, siddhiAppContext)
                            .generateCacheExpirer(), 0, purgeInterval, TimeUnit.MILLISECONDS);
        }
    }
}
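The preload above runs the compiledConditionForCaching and compiledSelectionForCaching that initCache built from an OnDemandQuery limited to maxCacheSize + 1 rows; the one extra row is what later lets the size checks detect that the store exceeds the cache. A sketch of that query construction in isolation, assuming maxCacheSize = 100 and an illustrative table id ('StockTable'); the import paths are assumed from the siddhi-query-api module:

import io.siddhi.query.api.execution.query.OnDemandQuery;
import io.siddhi.query.api.execution.query.input.store.InputStore;
import io.siddhi.query.api.execution.query.selection.Selector;
import io.siddhi.query.api.expression.Expression;

// Mirrors the construction in initCache: a select-all over the store,
// capped at maxCacheSize + 1 rows.
OnDemandQuery preloadQuery = OnDemandQuery.query()
        .from(InputStore.store("StockTable"))
        .select(Selector.selector().limit(Expression.value(100 + 1)));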
Use of io.siddhi.core.table.CacheTable in project siddhi by wso2.
The class AbstractQueryableRecordTable, method query.
@Override
public StreamEvent query(StateEvent matchingEvent, CompiledCondition compiledCondition,
                         CompiledSelection compiledSelection, Attribute[] outputAttributes)
        throws ConnectionUnavailableException {
    findMatchingEvent = matchingEvent;
    updateStoreTableSize();
    // handle compiled condition and selection type conversions
    ComplexEventChunk<StreamEvent> streamEventComplexEventChunk = new ComplexEventChunk<>();
    RecordStoreCompiledCondition recordStoreCompiledCondition;
    RecordStoreCompiledSelection recordStoreCompiledSelection;
    CompiledConditionWithCache compiledConditionWithCache = null;
    CompiledSelectionWithCache compiledSelectionWithCache = null;
    StreamEvent cacheResults;
    if (cacheEnabled) {
        RecordStoreCompiledCondition compiledConditionTemp = (RecordStoreCompiledCondition) compiledCondition;
        compiledConditionWithCache = (CompiledConditionWithCache) compiledConditionTemp.getCompiledCondition();
        recordStoreCompiledCondition = new RecordStoreCompiledCondition(
                compiledConditionTemp.variableExpressionExecutorMap,
                compiledConditionWithCache.getStoreCompileCondition(),
                compiledConditionTemp.getSiddhiQueryContext());
        compiledSelectionWithCache = (CompiledSelectionWithCache) compiledSelection;
        recordStoreCompiledSelection = compiledSelectionWithCache.recordStoreCompiledSelection;
    } else {
        recordStoreCompiledSelection = (RecordStoreCompiledSelection) compiledSelection;
        recordStoreCompiledCondition = (RecordStoreCompiledCondition) compiledCondition;
    }
    Map<String, Object> parameterMap = new HashMap<>();
    for (Map.Entry<String, ExpressionExecutor> entry :
            recordStoreCompiledCondition.variableExpressionExecutorMap.entrySet()) {
        parameterMap.put(entry.getKey(), entry.getValue().execute(matchingEvent));
    }
    for (Map.Entry<String, ExpressionExecutor> entry :
            recordStoreCompiledSelection.variableExpressionExecutorMap.entrySet()) {
        parameterMap.put(entry.getKey(), entry.getValue().execute(matchingEvent));
    }
    Iterator<Object[]> records;
    if (cacheEnabled) {
        readWriteLock.writeLock().lock();
        try {
            // when the store is smaller than the max cache size, return results from the cache
            if (storeTableSize <= maxCacheSize && !queryStoreWithoutCheckingCache.get()) {
                cacheResults = cacheTable.find(compiledConditionWithCache.getCacheCompileCondition(),
                        matchingEvent);
                if (log.isDebugEnabled()) {
                    log.debug(siddhiAppContext.getName() + "-"
                            + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                            + ": store table size is smaller than max cache. Sending results from cache");
                }
                if (cacheResults == null) {
                    return null;
                }
                return executeSelectorOnCacheResults(compiledSelectionWithCache, cacheResults,
                        matchingEvent.getStreamEvent(0));
            } else {
                // when the store is bigger than the max cache size
                if (log.isDebugEnabled() && !queryStoreWithoutCheckingCache.get()) {
                    log.debug(siddhiAppContext.getName() + "-"
                            + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                            + ": store table size is bigger than cache.");
                }
                // route to the cache only if the query contains all primary keys and uses == on them
                if (compiledConditionWithCache.isRouteToCache() && !queryStoreWithoutCheckingCache.get()) {
                    if (log.isDebugEnabled()) {
                        log.debug(siddhiAppContext.getName() + "-"
                                + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                                + ": cache constraints satisfied. Checking cache");
                    }
                    cacheResults = cacheTable.find(compiledConditionWithCache.getCacheCompileCondition(),
                            matchingEvent);
                    if (cacheResults != null) {
                        if (log.isDebugEnabled()) {
                            log.debug(siddhiAppContext.getName() + "-"
                                    + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                                    + ": cache hit. Sending results from cache");
                        }
                        return executeSelectorOnCacheResults(compiledSelectionWithCache, cacheResults,
                                matchingEvent.getStreamEvent(0));
                    }
                    if (log.isDebugEnabled()) {
                        log.debug(siddhiAppContext.getName() + "-"
                                + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                                + ": cache miss. Loading from store");
                    }
                    // read all fields of the missed entry from the store
                    Iterator<Object[]> recordsFromSelectAll;
                    if (recordTableHandler != null) {
                        recordsFromSelectAll = recordTableHandler.query(matchingEvent.getTimestamp(),
                                parameterMap, recordStoreCompiledCondition.getCompiledCondition(),
                                compiledSelectionForSelectAll, outputAttributes);
                    } else {
                        recordsFromSelectAll = query(parameterMap,
                                recordStoreCompiledCondition.getCompiledCondition(),
                                compiledSelectionForSelectAll, outputAttributes);
                    }
                    if (recordsFromSelectAll == null || !recordsFromSelectAll.hasNext()) {
                        if (log.isDebugEnabled()) {
                            log.debug(siddhiAppContext.getName() + "-"
                                    + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                                    + ": store also miss. sending null");
                        }
                        return null;
                    }
                    Object[] recordSelectAll = recordsFromSelectAll.next();
                    StreamEvent streamEvent = storeEventPool.newInstance();
                    streamEvent.setOutputData(new Object[outputAttributes.length]);
                    System.arraycopy(recordSelectAll, 0, streamEvent.getOutputData(), 0,
                            recordSelectAll.length);
                    // if the cache is full, evict one entry per the cache policy before adding
                    if (cacheTable.size() == maxCacheSize) {
                        ((CacheTable) cacheTable).deleteOneEntryUsingCachePolicy();
                    }
                    ((CacheTable) cacheTable).addStreamEventUptoMaxSize(streamEvent);
                    if (log.isDebugEnabled()) {
                        log.debug(siddhiAppContext.getName() + "-"
                                + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                                + ": sending results from cache after loading from store");
                    }
                    cacheResults = cacheTable.find(compiledConditionWithCache.getCacheCompileCondition(),
                            matchingEvent);
                    return executeSelectorOnCacheResults(compiledSelectionWithCache, cacheResults,
                            matchingEvent.getStreamEvent(0));
                }
            }
        } finally {
            readWriteLock.writeLock().unlock();
        }
    }
    if (log.isDebugEnabled() && !queryStoreWithoutCheckingCache.get()) {
        log.debug(siddhiAppContext.getName() + "-"
                + recordStoreCompiledCondition.getSiddhiQueryContext().getName()
                + ": sending results from store");
    }
    // cache not enabled, or cache routing conditions not satisfied: query the store
    if (recordTableHandler != null) {
        records = recordTableHandler.query(matchingEvent.getTimestamp(), parameterMap,
                recordStoreCompiledCondition.getCompiledCondition(),
                recordStoreCompiledSelection.compiledSelection, outputAttributes);
    } else {
        records = query(parameterMap, recordStoreCompiledCondition.getCompiledCondition(),
                recordStoreCompiledSelection.compiledSelection, outputAttributes);
    }
    addStreamEventToChunk(outputAttributes, streamEventComplexEventChunk, records);
    return streamEventComplexEventChunk.getFirst();
}
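This query method backs on-demand (store) queries issued against the table. A minimal usage sketch, assuming 'runtime' is a started SiddhiAppRuntime whose app defines the cached StockTable from the earlier example:

import io.siddhi.core.event.Event;

// An on-demand query against a store table lands in AbstractQueryableRecordTable.query(...),
// which answers from the cache when the routing conditions allow and falls back to the store.
Event[] results = runtime.query("from StockTable on symbol == 'WSO2' select symbol, price");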