Use of io.siddhi.query.api.definition.Attribute in project siddhi by wso2.
From the class AbstractQueryableRecordTable, method initCache.
@Override
public void initCache(TableDefinition tableDefinition, SiddhiAppContext siddhiAppContext,
                      StreamEventCloner storeEventCloner, ConfigReader configReader) {
    String[] annotationNames = {ANNOTATION_STORE, ANNOTATION_CACHE};
    Annotation cacheTableAnnotation = getAnnotation(annotationNames, tableDefinition.getAnnotations());
    if (cacheTableAnnotation != null) {
        cacheEnabled = true;
        maxCacheSize = Integer.parseInt(cacheTableAnnotation.getElement(CACHE_TABLE_SIZE));
        TableDefinition cacheTableDefinition = TableDefinition.id(tableDefinition.getId());
        for (Attribute attribute : tableDefinition.getAttributeList()) {
            cacheTableDefinition.attribute(attribute.getName(), attribute.getType());
        }
        for (Annotation annotation : tableDefinition.getAnnotations()) {
            if (!annotation.getName().equalsIgnoreCase("Store")) {
                cacheTableDefinition.annotation(annotation);
            }
        }
        String cachePolicy = cacheTableAnnotation.getElement(ANNOTATION_CACHE_POLICY);
        if (cachePolicy == null || cachePolicy.equalsIgnoreCase("FIFO")) {
            cachePolicy = "FIFO";
            cacheTable = new CacheTableFIFO();
        } else if (cachePolicy.equalsIgnoreCase("LRU")) {
            cacheTable = new CacheTableLRU();
        } else if (cachePolicy.equalsIgnoreCase("LFU")) {
            cacheTable = new CacheTableLFU();
        } else {
            throw new SiddhiAppCreationException(siddhiAppContext.getName()
                    + " : Cache policy can only be one of FIFO, LRU, and LFU but given as " + cachePolicy);
        }
        // check if cache expiry enabled and initialize relevant parameters
        if (cacheTableAnnotation.getElement(ANNOTATION_CACHE_RETENTION_PERIOD) != null) {
            cacheExpiryEnabled = true;
            retentionPeriod = Expression.Time.timeToLong(
                    cacheTableAnnotation.getElement(ANNOTATION_CACHE_RETENTION_PERIOD));
            if (cacheTableAnnotation.getElement(ANNOTATION_CACHE_PURGE_INTERVAL) == null) {
                purgeInterval = retentionPeriod;
            } else {
                purgeInterval = Expression.Time.timeToLong(
                        cacheTableAnnotation.getElement(ANNOTATION_CACHE_PURGE_INTERVAL));
            }
            storeSizeCheckInterval = purgeInterval * 5;
        } else {
            storeSizeCheckInterval = 10000;
        }
        ((CacheTable) cacheTable).initCacheTable(cacheTableDefinition, configReader, siddhiAppContext,
                recordTableHandler, cacheExpiryEnabled, maxCacheSize, cachePolicy);
        // creating objects needed to load cache
        SiddhiQueryContext siddhiQueryContext = new SiddhiQueryContext(siddhiAppContext,
                CACHE_QUERY_NAME + tableDefinition.getId());
        MatchingMetaInfoHolder matchingMetaInfoHolder =
                generateMatchingMetaInfoHolderForCacheTable(tableDefinition);
        OnDemandQuery onDemandQuery = OnDemandQuery.query()
                .from(InputStore.store(tableDefinition.getId()))
                .select(Selector.selector().limit(Expression.value(maxCacheSize + 1)));
        List<VariableExpressionExecutor> variableExpressionExecutors = new ArrayList<>();
        compiledConditionForCaching = compileCondition(Expression.value(true), matchingMetaInfoHolder,
                variableExpressionExecutors, tableMap, siddhiQueryContext);
        List<Attribute> expectedOutputAttributes = buildExpectedOutputAttributes(onDemandQuery, tableMap,
                SiddhiConstants.UNKNOWN_STATE, matchingMetaInfoHolder, siddhiQueryContext);
        compiledSelectionForCaching = compileSelection(onDemandQuery.getSelector(), expectedOutputAttributes,
                matchingMetaInfoHolder, variableExpressionExecutors, tableMap, siddhiQueryContext);
        outputAttributesForCaching = expectedOutputAttributes.toArray(new Attribute[0]);
        QueryParserHelper.reduceMetaComplexEvent(matchingMetaInfoHolder.getMetaStateEvent());
        QueryParserHelper.updateVariablePosition(matchingMetaInfoHolder.getMetaStateEvent(),
                variableExpressionExecutors);
        compiledSelectionForSelectAll = generateCSForSelectAll();
    }
}
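For reference, the nested annotation structure that this method expects can be reproduced with the Siddhi query API. The sketch below is only illustrative: the table name, attributes, store type, and the @Cache element keys ("size", "cache.policy", "retention.period", "purge.interval") are assumptions inferred from the constants read above, not values confirmed by this listing.

import io.siddhi.query.api.annotation.Annotation;
import io.siddhi.query.api.definition.Attribute;
import io.siddhi.query.api.definition.TableDefinition;

public class CachedTableDefinitionSketch {

    public static TableDefinition build() {
        // Hypothetical store settings; only the nesting of @Cache inside @Store mirrors
        // what getAnnotation(new String[]{ANNOTATION_STORE, ANNOTATION_CACHE}, ...) resolves.
        Annotation storeAnnotation = Annotation.annotation("Store")
                .element("type", "rdbms")
                .annotation(Annotation.annotation("Cache")
                        .element("size", "10")
                        .element("cache.policy", "LRU")
                        .element("retention.period", "5 min")
                        .element("purge.interval", "1 min"));
        TableDefinition tableDefinition = TableDefinition.id("StockTable")
                .attribute("symbol", Attribute.Type.STRING)
                .attribute("price", Attribute.Type.FLOAT);
        tableDefinition.annotation(storeAnnotation);
        return tableDefinition;
    }
}

Given a definition like this, initCache would pick the LRU cache table, enable expiry, take purgeInterval from the purge interval element, and set storeSizeCheckInterval to five times the purge interval.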
Use of io.siddhi.query.api.definition.Attribute in project siddhi by wso2.
From the class AbstractQueryableRecordTable, method generateCSForSelectAll.
private CompiledSelection generateCSForSelectAll() {
    MetaStreamEvent metaStreamEventForSelectAll = new MetaStreamEvent();
    for (Attribute attribute : tableDefinition.getAttributeList()) {
        metaStreamEventForSelectAll.addOutputData(attribute);
    }
    metaStreamEventForSelectAll.addInputDefinition(tableDefinition);
    MetaStateEvent metaStateEventForSelectAll = new MetaStateEvent(1);
    metaStateEventForSelectAll.addEvent(metaStreamEventForSelectAll);
    MatchingMetaInfoHolder matchingMetaInfoHolderForSelectAll = new MatchingMetaInfoHolder(
            metaStateEventForSelectAll, -1, 0, tableDefinition, tableDefinition, 0);
    List<OutputAttribute> outputAttributesAll = new ArrayList<>();
    List<Attribute> attributeList = tableDefinition.getAttributeList();
    for (Attribute attribute : attributeList) {
        outputAttributesAll.add(new OutputAttribute(new Variable(attribute.getName())));
    }
    List<SelectAttributeBuilder> selectAttributeBuilders = new ArrayList<>(outputAttributesAll.size());
    List<VariableExpressionExecutor> variableExpressionExecutors = new ArrayList<>();
    for (OutputAttribute outputAttribute : outputAttributesAll) {
        ExpressionBuilder expressionBuilder = new ExpressionBuilder(outputAttribute.getExpression(),
                matchingMetaInfoHolderForSelectAll, variableExpressionExecutors, tableMap, null, null, null);
        selectAttributeBuilders.add(new SelectAttributeBuilder(expressionBuilder, outputAttribute.getRename()));
    }
    return compileSelection(selectAttributeBuilders, null, null, null, null, null);
}
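For context, the select-all projection compiled here corresponds to an on-demand query with no limit clause, and it can be composed with the same builders that initCache uses for its caching query. A minimal sketch with placeholder table and attribute names:

import io.siddhi.query.api.execution.query.OnDemandQuery;
import io.siddhi.query.api.execution.query.input.store.InputStore;
import io.siddhi.query.api.execution.query.selection.Selector;
import io.siddhi.query.api.expression.Expression;

public class SelectAllQuerySketch {

    public static OnDemandQuery build() {
        // Roughly "from StockTable select symbol, price" over every table attribute,
        // which is the shape generateCSForSelectAll compiles internally.
        return OnDemandQuery.query()
                .from(InputStore.store("StockTable"))
                .select(Selector.selector()
                        .select(Expression.variable("symbol"))
                        .select(Expression.variable("price")));
    }
}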
Use of io.siddhi.query.api.definition.Attribute in project siddhi by wso2.
From the class CacheExpirer, method generateExpiryCompiledCondition.
private CompiledCondition generateExpiryCompiledCondition() {
    MetaStreamEvent tableMetaStreamEvent = new MetaStreamEvent();
    tableMetaStreamEvent.setEventType(MetaStreamEvent.EventType.TABLE);
    TableDefinition matchingTableDefinition = TableDefinition.id(cacheTable.getTableDefinition().getId());
    for (Attribute attribute : cacheTable.getTableDefinition().getAttributeList()) {
        tableMetaStreamEvent.addOutputData(attribute);
        matchingTableDefinition.attribute(attribute.getName(), attribute.getType());
    }
    tableMetaStreamEvent.addInputDefinition(matchingTableDefinition);
    streamEventFactory = new StreamEventFactory(tableMetaStreamEvent);
    Variable rightExpressionForSubtract = new Variable(CACHE_TABLE_TIMESTAMP_ADDED);
    rightExpressionForSubtract.setStreamId(cacheTable.getTableDefinition().getId());
    Expression rightExpressionForCompare = new LongConstant(retentionPeriod);
    Compare.Operator greaterThanOperator = Compare.Operator.GREATER_THAN;
    MetaStreamEvent currentTimeMetaStreamEvent = new MetaStreamEvent();
    currentTimeMetaStreamEvent.setEventType(MetaStreamEvent.EventType.TABLE);
    Attribute currentTimeAttribute = new Attribute(CACHE_EXPIRE_CURRENT_TIME, Attribute.Type.LONG);
    currentTimeMetaStreamEvent.addOutputData(currentTimeAttribute);
    TableDefinition currentTimeTableDefinition = TableDefinition.id("");
    currentTimeTableDefinition.attribute(CACHE_EXPIRE_CURRENT_TIME, Attribute.Type.LONG);
    currentTimeMetaStreamEvent.addInputDefinition(currentTimeTableDefinition);
    MetaStateEvent metaStateEvent = new MetaStateEvent(2);
    metaStateEvent.addEvent(currentTimeMetaStreamEvent);
    metaStateEvent.addEvent(tableMetaStreamEvent);
    MatchingMetaInfoHolder matchingMetaInfoHolder = MatcherParser.constructMatchingMetaStateHolder(
            metaStateEvent, 0, cacheTable.getTableDefinition(), 0);
    List<VariableExpressionExecutor> variableExpressionExecutors = new ArrayList<>();
    Expression leftExpressionForSubtract = new Variable(CACHE_EXPIRE_CURRENT_TIME);
    Expression leftExpressionForCompare = new Subtract(leftExpressionForSubtract, rightExpressionForSubtract);
    Expression deleteCondition = new Compare(leftExpressionForCompare, greaterThanOperator,
            rightExpressionForCompare);
    SiddhiQueryContext siddhiQueryContext = new SiddhiQueryContext(siddhiAppContext, "expiryDeleteQuery");
    return cacheTable.compileCondition(deleteCondition, matchingMetaInfoHolder, variableExpressionExecutors,
            tableMap, siddhiQueryContext);
}
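The delete predicate above is built directly from the expression constructors (Subtract, Compare, LongConstant); the same "currentTime - timestampAdded > retentionPeriod" shape can also be written with the static factories on Expression. A minimal sketch, with placeholder attribute names, table id, and a five-minute retention period standing in for the real constants:

import io.siddhi.query.api.expression.Expression;
import io.siddhi.query.api.expression.Variable;
import io.siddhi.query.api.expression.condition.Compare;

public class ExpiryConditionSketch {

    public static Expression build() {
        // Placeholder names; the real code uses CACHE_EXPIRE_CURRENT_TIME,
        // CACHE_TABLE_TIMESTAMP_ADDED, and the configured retentionPeriod.
        Variable timestampAdded = new Variable("timestamp.added");
        timestampAdded.setStreamId("StockTable");
        return Expression.compare(
                Expression.subtract(Expression.variable("CURRENT_TIME"), timestampAdded),
                Compare.Operator.GREATER_THAN,
                Expression.value(5 * 60 * 1000L));
    }
}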
Use of io.siddhi.query.api.definition.Attribute in project siddhi by wso2.
From the class IncrementalDataPurger, method matchingMetaInfoHolder.
/**
 * Builds the MatchingMetaInfoHolder used to delete records.
 */
private MatchingMetaInfoHolder matchingMetaInfoHolder(Table table, Attribute attribute) {
    MetaStateEvent metaStateEvent = new MetaStateEvent(2);
    MetaStreamEvent metaStreamEventWithDeletePara = new MetaStreamEvent();
    MetaStreamEvent metaStreamEventForTable = new MetaStreamEvent();
    TableDefinition deleteTableDefinition = TableDefinition.id("");
    deleteTableDefinition.attribute(attribute.getName(), attribute.getType());
    metaStreamEventWithDeletePara.setEventType(MetaStreamEvent.EventType.TABLE);
    metaStreamEventWithDeletePara.addOutputData(attribute);
    metaStreamEventWithDeletePara.addInputDefinition(deleteTableDefinition);
    metaStreamEventForTable.setEventType(MetaStreamEvent.EventType.TABLE);
    for (Attribute tableAttribute : table.getTableDefinition().getAttributeList()) {
        metaStreamEventForTable.addOutputData(tableAttribute);
    }
    metaStreamEventForTable.addInputDefinition(table.getTableDefinition());
    metaStateEvent.addEvent(metaStreamEventWithDeletePara);
    metaStateEvent.addEvent(metaStreamEventForTable);
    TableDefinition definition = table.getTableDefinition();
    return new MatchingMetaInfoHolder(metaStateEvent, 0, 1, deleteTableDefinition, definition, 0);
}
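Conditions compiled against this holder compare a column of the store table (state index 1) with the single purge parameter attribute (state index 0). A minimal sketch of such a condition; the column name, table id, and parameter name are hypothetical, not taken from IncrementalDataPurger:

import io.siddhi.query.api.expression.Expression;
import io.siddhi.query.api.expression.Variable;
import io.siddhi.query.api.expression.condition.Compare;

public class PurgeConditionSketch {

    public static Expression build() {
        // "<table>.AGG_TIMESTAMP < purgeTime" with placeholder names; the left side is a
        // column of the store definition, the right side the attribute passed into
        // matchingMetaInfoHolder(Table, Attribute).
        Variable tableTimestamp = new Variable("AGG_TIMESTAMP");
        tableTimestamp.setStreamId("stockAggregation_SECONDS");
        return Expression.compare(
                tableTimestamp,
                Compare.Operator.LESS_THAN,
                Expression.variable("purgeTime"));
    }
}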
Use of io.siddhi.query.api.definition.Attribute in project siddhi by wso2.
From the class StreamEventConverterFactory, method getConversionElements.
private static List<StreamEventConverter.ConversionMapping> getConversionElements(
        MetaStreamEvent metaStreamEvent, int size) {
    AbstractDefinition inputDefinition = metaStreamEvent.getInputDefinitions().get(0);
    List<StreamEventConverter.ConversionMapping> conversionMappings =
            new ArrayList<StreamEventConverter.ConversionMapping>(size);
    for (int j = 0; j < 3; j++) {
        List<Attribute> currentDataList = null;
        if (j == 0) {
            currentDataList = metaStreamEvent.getBeforeWindowData();
        } else if (j == 1) {
            currentDataList = metaStreamEvent.getOnAfterWindowData();
        } else if (j == 2) {
            currentDataList = metaStreamEvent.getOutputData();
        }
        if (currentDataList != null) {
            int i = 0;
            for (Attribute attribute : currentDataList) {
                // Only variable slots will be filled.
                if (attribute == null) {
                    i++;
                } else if (!inputDefinition.getAttributeList().contains(attribute)) {
                    i++;
                } else {
                    int fromPosition = inputDefinition.getAttributePosition(attribute.getName());
                    StreamEventConverter.ConversionMapping conversionMapping =
                            new StreamEventConverter.ConversionMapping();
                    conversionMapping.setFromPosition(fromPosition);
                    int[] toPosition = new int[2];
                    toPosition[0] = j;
                    toPosition[1] = i;
                    conversionMapping.setToPosition(toPosition);
                    conversionMappings.add(conversionMapping);
                    i++;
                }
            }
        }
    }
    return conversionMappings;
}
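To see how these mappings come about, the sketch below hands a MetaStreamEvent to the factory's public entry point (assumed here to be StreamEventConverterFactory.constructEventConverter): the input stream has three attributes and only two are projected as output data, so each projected attribute is mapped from its input position to a [dataListIndex, positionInList] pair. Stream and attribute names are illustrative.

import io.siddhi.core.event.stream.MetaStreamEvent;
import io.siddhi.core.event.stream.converter.StreamEventConverter;
import io.siddhi.core.event.stream.converter.StreamEventConverterFactory;
import io.siddhi.query.api.definition.Attribute;
import io.siddhi.query.api.definition.StreamDefinition;

public class ConversionMappingSketch {

    public static StreamEventConverter build() {
        StreamDefinition streamDefinition = StreamDefinition.id("StockStream")
                .attribute("symbol", Attribute.Type.STRING)
                .attribute("price", Attribute.Type.FLOAT)
                .attribute("volume", Attribute.Type.LONG);
        MetaStreamEvent metaStreamEvent = new MetaStreamEvent();
        metaStreamEvent.addInputDefinition(streamDefinition);
        // Only "price" and "volume" are projected, so getConversionElements would map
        // input position 1 -> output slot [2, 0] and input position 2 -> output slot [2, 1]
        // (2 being the output-data list, i.e. j == 2 above).
        metaStreamEvent.addOutputData(new Attribute("price", Attribute.Type.FLOAT));
        metaStreamEvent.addOutputData(new Attribute("volume", Attribute.Type.LONG));
        return StreamEventConverterFactory.constructEventConverter(metaStreamEvent);
    }
}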