Search in sources:

Example 1 with Annotation

Use of io.siddhi.query.api.annotation.Annotation in project siddhi by wso2.

From the class IncrementalDataPurger, method init:

public void init(AggregationDefinition aggregationDefinition, StreamEventFactory streamEventFactory, Map<TimePeriod.Duration, Table> aggregationTables, Boolean isProcessingOnExternalTime, SiddhiQueryContext siddhiQueryContext, List<TimePeriod.Duration> activeIncrementalDurations, String timeZone, Map<String, Window> windowMap, Map<String, AggregationRuntime> aggregationMap) {
    this.siddhiQueryContext = siddhiQueryContext;
    this.aggregationDefinition = aggregationDefinition;
    List<Annotation> annotations = aggregationDefinition.getAnnotations();
    this.streamEventFactory = streamEventFactory;
    this.aggregationTables = aggregationTables;
    this.activeIncrementalDurations = activeIncrementalDurations;
    this.windowMap = windowMap;
    this.aggregationMap = aggregationMap;
    if (isProcessingOnExternalTime) {
        purgingTimestampField = AGG_EXTERNAL_TIMESTAMP_COL;
    } else {
        purgingTimestampField = AGG_START_TIMESTAMP_COL;
    }
    aggregatedTimestampAttribute = new Attribute(purgingTimestampField, Attribute.Type.LONG);
    VariableExpressionExecutor variableExpressionExecutor = new VariableExpressionExecutor(aggregatedTimestampAttribute, 0, 1);
    variableExpressionExecutorList.add(variableExpressionExecutor);
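    // Register each aggregation table and seed the default retention period and the minimum allowed retention for its granularity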
    for (Map.Entry<TimePeriod.Duration, Table> entry : aggregationTables.entrySet()) {
        this.tableMap.put(entry.getValue().getTableDefinition().getId(), entry.getValue());
        switch(entry.getKey()) {
            case SECONDS:
                retentionPeriods.put(entry.getKey(), Expression.Time.sec(120).value());
                minimumDurationMap.put(entry.getKey(), Expression.Time.sec(120).value());
                break;
            case MINUTES:
                retentionPeriods.put(entry.getKey(), Expression.Time.hour(24).value());
                minimumDurationMap.put(entry.getKey(), Expression.Time.minute(120).value());
                break;
            case HOURS:
                retentionPeriods.put(entry.getKey(), Expression.Time.day(30).value());
                minimumDurationMap.put(entry.getKey(), Expression.Time.hour(25).value());
                break;
            case DAYS:
                retentionPeriods.put(entry.getKey(), Expression.Time.year(1).value());
                minimumDurationMap.put(entry.getKey(), Expression.Time.day(32).value());
                break;
            case MONTHS:
                retentionPeriods.put(entry.getKey(), RETAIN_ALL);
                minimumDurationMap.put(entry.getKey(), Expression.Time.month(13).value());
                break;
            case YEARS:
                retentionPeriods.put(entry.getKey(), RETAIN_ALL);
                minimumDurationMap.put(entry.getKey(), 0L);
        }
    }
    this.timeZone = timeZone;
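    // Index the aggregation's annotations by lower-cased name so @purge can be looked up below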
    Map<String, Annotation> annotationTypes = new HashMap<>();
    for (Annotation annotation : annotations) {
        annotationTypes.put(annotation.getName().toLowerCase(), annotation);
    }
    Annotation purge = annotationTypes.get(SiddhiConstants.NAMESPACE_PURGE);
    if (purge != null) {
        if (purge.getElement(SiddhiConstants.ANNOTATION_ELEMENT_ENABLE) != null) {
            String purgeEnable = purge.getElement(SiddhiConstants.ANNOTATION_ELEMENT_ENABLE);
            if (!("true".equalsIgnoreCase(purgeEnable) || "false".equalsIgnoreCase(purgeEnable))) {
                throw new SiddhiAppCreationException("Invalid value for enable: " + purgeEnable + "." + " Please use true or false");
            } else {
                purgingEnabled = Boolean.parseBoolean(purgeEnable);
            }
        }
        if (purgingEnabled) {
            // If interval is defined, default value of 15 min will be replaced by user input value
            if (purge.getElement(SiddhiConstants.ANNOTATION_ELEMENT_INTERVAL) != null) {
                String interval = purge.getElement(SiddhiConstants.ANNOTATION_ELEMENT_INTERVAL);
                purgeExecutionInterval = timeToLong(interval);
            }
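            // @retentionPeriod elements override the defaults seeded above, validated against the per-granularity minimums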
            List<Annotation> retentions = purge.getAnnotations(SiddhiConstants.NAMESPACE_RETENTION_PERIOD);
            if (retentions != null && !retentions.isEmpty()) {
                Annotation retention = retentions.get(0);
                List<Element> elements = retention.getElements();
                for (Element element : elements) {
                    TimePeriod.Duration duration = normalizeDuration(element.getKey());
                    if (!activeIncrementalDurations.contains(duration)) {
                        throw new SiddhiAppCreationException(duration + " granularity cannot be purged since " + "aggregation has not been performed in " + duration + " granularity");
                    }
                    if (element.getValue().equalsIgnoreCase(RETAIN_ALL_VALUES)) {
                        retentionPeriods.put(duration, RETAIN_ALL);
                    } else {
                        if (timeToLong(element.getValue()) >= minimumDurationMap.get(duration)) {
                            retentionPeriods.put(duration, timeToLong(element.getValue()));
                        } else {
                            throw new SiddhiAppCreationException(duration + " granularity cannot be purged" + " with a retention of '" + element.getValue() + "', minimum retention" + " should be greater than " + TimeUnit.MILLISECONDS.toMinutes(minimumDurationMap.get(duration)) + " minutes");
                        }
                    }
                }
            }
        }
    }
    compiledConditionsHolder = createCompileConditions(aggregationTables, tableMap);
}
Also used: Table (io.siddhi.core.table.Table), Attribute (io.siddhi.query.api.definition.Attribute), OrderByAttribute (io.siddhi.query.api.execution.query.selection.OrderByAttribute), OutputAttribute (io.siddhi.query.api.execution.query.selection.OutputAttribute), HashMap (java.util.HashMap), SiddhiAppCreationException (io.siddhi.core.exception.SiddhiAppCreationException), TimePeriod (io.siddhi.query.api.aggregation.TimePeriod), VariableExpressionExecutor (io.siddhi.core.executor.VariableExpressionExecutor), Element (io.siddhi.query.api.annotation.Element), Time.normalizeDuration (io.siddhi.query.api.expression.Expression.Time.normalizeDuration), Annotation (io.siddhi.query.api.annotation.Annotation), Map (java.util.Map), EnumMap (java.util.EnumMap)
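
For orientation, below is a rough sketch of how a matching @purge annotation could be built with the same builder API and attached to an aggregation definition. The annotation and element names ("purge", "enable", "interval", "retentionPeriod") are assumed values of the SiddhiConstants referenced above, and the time values and variable names are illustrative only.

// Sketch only: element values and the aggregationDefinition variable are illustrative.
Annotation purge = Annotation.annotation("purge")
        .element("enable", "true")
        .element("interval", "10 min")
        .annotation(Annotation.annotation("retentionPeriod")
                .element("sec", "120 min")
                .element("days", "1 year"));
// init() above later reads this via aggregationDefinition.getAnnotations().
aggregationDefinition.annotation(purge);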

Example 2 with Annotation

Use of io.siddhi.query.api.annotation.Annotation in project siddhi by wso2.

From the class SiddhiAppRuntimeImpl, method collectDeprecateWarnings:

private void collectDeprecateWarnings() {
    Map<String, Class> deprecatedExtensions = siddhiAppContext.getSiddhiContext().getDeprecatedSiddhiExtensions();
    List<AbstractDefinition> extensionsInUse = new ArrayList<>();
    extensionsInUse.addAll(streamDefinitionMap.values());
    extensionsInUse.addAll(tableDefinitionMap.values());
    extensionsInUse.addAll(windowDefinitionMap.values());
    extensionsInUse.addAll(aggregationDefinitionMap.values());
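    // Check every definition's annotations; source/sink/store types are prefixed to match keys in the deprecated-extensions map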
    for (AbstractDefinition extDefinition : extensionsInUse) {
        for (Annotation annotation : extDefinition.getAnnotations()) {
            String type = annotation.getElement(SiddhiConstants.ANNOTATION_ELEMENT_TYPE);
            if (annotation.getName().equalsIgnoreCase(SiddhiConstants.ANNOTATION_SOURCE)) {
                type = "source:" + type;
            }
            if (annotation.getName().equalsIgnoreCase(SiddhiConstants.ANNOTATION_SINK)) {
                type = "sink:" + type;
            }
            if (annotation.getName().equalsIgnoreCase(SiddhiConstants.ANNOTATION_STORE)) {
                type = "store:" + type;
            }
            if (type != null && deprecatedExtensions.containsKey(type)) {
                Class ext = deprecatedExtensions.get(type);
                Extension extAnnotation = (Extension) ext.getAnnotation(Extension.class);
                String warning = extAnnotation.deprecationNotice().isEmpty() ? type + " is being deprecated." : extAnnotation.deprecationNotice();
                warnings.add(warning);
                log.warn(warning);
            }
        }
    }
}
Also used: Extension (io.siddhi.annotation.Extension), ArrayList (java.util.ArrayList), AbstractDefinition (io.siddhi.query.api.definition.AbstractDefinition), Annotation (io.siddhi.query.api.annotation.Annotation)
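
As a small illustration of the key construction above, the same Annotation API can reproduce the prefixing applied to a @source annotation. The literal strings ("source", "type", "http") stand in for the SiddhiConstants and the extension type, and are assumptions for this sketch.

// Sketch: a source annotation of type "http" yields the lookup key "source:http".
Annotation source = Annotation.annotation("source").element("type", "http");
String deprecatedKey = "source:" + source.getElement("type");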

Example 3 with Annotation

Use of io.siddhi.query.api.annotation.Annotation in project siddhi by wso2.

From the class AggregationParser, method initDefaultTables:

private static HashMap<TimePeriod.Duration, Table> initDefaultTables(String aggregatorName, List<TimePeriod.Duration> aggregationDurations, StreamDefinition streamDefinition, SiddhiAppRuntimeBuilder siddhiAppRuntimeBuilder, List<Annotation> annotations, List<Variable> groupByVariableList, boolean isProcessingOnExternalTime, boolean enablePartitioning) {
    HashMap<TimePeriod.Duration, Table> aggregationTableMap = new HashMap<>();
    // Create annotations for primary key
    Annotation primaryKeyAnnotation = new Annotation(SiddhiConstants.ANNOTATION_PRIMARY_KEY);
    primaryKeyAnnotation.element(null, AGG_START_TIMESTAMP_COL);
    if (enablePartitioning) {
        primaryKeyAnnotation.element(null, AGG_SHARD_ID_COL);
    }
    if (isProcessingOnExternalTime) {
        primaryKeyAnnotation.element(null, AGG_EXTERNAL_TIMESTAMP_COL);
    }
    for (Variable groupByVariable : groupByVariableList) {
        primaryKeyAnnotation.element(null, groupByVariable.getAttributeName());
    }
    if (streamDefinition.getAttributeList().contains(new Attribute(AGG_LAST_TIMESTAMP_COL, Attribute.Type.LONG))) {
        Annotation indexAnnotation = new Annotation(SiddhiConstants.ANNOTATION_INDEX);
        indexAnnotation.element(null, AGG_LAST_TIMESTAMP_COL);
        annotations.add(indexAnnotation);
    }
    annotations.add(primaryKeyAnnotation);
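    // Create one backing table per aggregation granularity, copying the stream attributes and attaching the collected annotations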
    for (TimePeriod.Duration duration : aggregationDurations) {
        String tableId = aggregatorName + "_" + duration.toString();
        TableDefinition tableDefinition = TableDefinition.id(tableId);
        for (Attribute attribute : streamDefinition.getAttributeList()) {
            tableDefinition.attribute(attribute.getName(), attribute.getType());
        }
        annotations.forEach(tableDefinition::annotation);
        siddhiAppRuntimeBuilder.defineTable(tableDefinition);
        aggregationTableMap.put(duration, siddhiAppRuntimeBuilder.getTableMap().get(tableId));
    }
    return aggregationTableMap;
}
Also used: Table (io.siddhi.core.table.Table), Variable (io.siddhi.query.api.expression.Variable), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap), Attribute (io.siddhi.query.api.definition.Attribute), OutputAttribute (io.siddhi.query.api.execution.query.selection.OutputAttribute), TimePeriod (io.siddhi.query.api.aggregation.TimePeriod), TableDefinition (io.siddhi.query.api.definition.TableDefinition), Annotation (io.siddhi.query.api.annotation.Annotation)
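
To make the generated schema concrete, here is a sketch of roughly what one per-duration table definition could look like after this method has run. The aggregation name, attribute names, and the literal "AGG_TIMESTAMP" and "PrimaryKey" strings are illustrative assumptions, not values read from the Siddhi sources.

// Sketch of a MINUTES-granularity table with an assumed group-by attribute.
Annotation primaryKey = Annotation.annotation("PrimaryKey")
        .element(null, "AGG_TIMESTAMP")
        .element(null, "symbol");
TableDefinition minuteTable = TableDefinition.id("TradeAggregation_MINUTES")
        .attribute("AGG_TIMESTAMP", Attribute.Type.LONG)
        .attribute("symbol", Attribute.Type.STRING)
        .attribute("avgPrice", Attribute.Type.DOUBLE)
        .annotation(primaryKey);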

Example 4 with Annotation

Use of io.siddhi.query.api.annotation.Annotation in project siddhi by wso2.

From the class TestStoreForCacheMiss, method init:

@Override
protected void init(TableDefinition tableDefinition, ConfigReader configReader) {
    inMemoryTable = new InMemoryTable();
    MetaStreamEvent cacheTableMetaStreamEvent = new MetaStreamEvent();
    cacheTableMetaStreamEvent.addInputDefinition(tableDefinition);
    for (Attribute attribute : tableDefinition.getAttributeList()) {
        cacheTableMetaStreamEvent.addOutputData(attribute);
    }
    StreamEventCloner testTableStreamEventCloner = new StreamEventCloner(cacheTableMetaStreamEvent, storeEventPool);
    TableDefinition testStoreContainingIMTableDefinition = TableDefinition.id(tableDefinition.getId());
    for (Attribute attribute : tableDefinition.getAttributeList()) {
        testStoreContainingIMTableDefinition.attribute(attribute.getName(), attribute.getType());
    }
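    // Copy all annotations except @Store, since this definition backs the plain in-memory table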
    for (Annotation annotation : tableDefinition.getAnnotations()) {
        if (!annotation.getName().equalsIgnoreCase("Store")) {
            testStoreContainingIMTableDefinition.annotation(annotation);
        }
    }
    inMemoryTable.init(testStoreContainingIMTableDefinition, storeEventPool, testTableStreamEventCloner, configReader, siddhiAppContext, recordTableHandler);
}
Also used: InMemoryTable (io.siddhi.core.table.InMemoryTable), Attribute (io.siddhi.query.api.definition.Attribute), StreamEventCloner (io.siddhi.core.event.stream.StreamEventCloner), TableDefinition (io.siddhi.query.api.definition.TableDefinition), MetaStreamEvent (io.siddhi.core.event.stream.MetaStreamEvent), Annotation (io.siddhi.query.api.annotation.Annotation)
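
A minimal sketch of the kind of definition this store might receive and what the copy keeps; the table name, attributes, and store type string are assumed for illustration.

// Sketch: the original definition carries a @Store annotation plus others.
TableDefinition storeBacked = TableDefinition.id("StockTable")
        .attribute("symbol", Attribute.Type.STRING)
        .annotation(Annotation.annotation("Store").element("type", "testStoreForCacheMiss"))
        .annotation(Annotation.annotation("PrimaryKey").element(null, "symbol"));
// After init() above, the in-memory copy keeps @PrimaryKey but not @Store,
// so the clone is not itself treated as a store-backed table.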

Example 5 with Annotation

Use of io.siddhi.query.api.annotation.Annotation in project siddhi by wso2.

From the class TestStoreForCachePreLoading, method init:

@Override
protected void init(TableDefinition tableDefinition, ConfigReader configReader) {
    inMemoryTable = new InMemoryTable();
    MetaStreamEvent cacheTableMetaStreamEvent = new MetaStreamEvent();
    cacheTableMetaStreamEvent.addInputDefinition(tableDefinition);
    for (Attribute attribute : tableDefinition.getAttributeList()) {
        cacheTableMetaStreamEvent.addOutputData(attribute);
    }
    StreamEventCloner testTableStreamEventCloner = new StreamEventCloner(cacheTableMetaStreamEvent, storeEventPool);
    TableDefinition testStoreContainingIMTableDefinition = TableDefinition.id(tableDefinition.getId());
    for (Attribute attribute : tableDefinition.getAttributeList()) {
        testStoreContainingIMTableDefinition.attribute(attribute.getName(), attribute.getType());
    }
    for (Annotation annotation : tableDefinition.getAnnotations()) {
        if (!annotation.getName().equalsIgnoreCase("Store")) {
            testStoreContainingIMTableDefinition.annotation(annotation);
        }
    }
    inMemoryTable.init(testStoreContainingIMTableDefinition, storeEventPool, testTableStreamEventCloner, configReader, siddhiAppContext, recordTableHandler);
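    // Pre-load the in-memory table with two events so it starts with data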
    ComplexEventChunk<StreamEvent> originalData = new ComplexEventChunk<>();
    StreamEvent data1 = new StreamEvent(0, 0, 3);
    data1.setOutputData(new Object[] { "WSO2", 55.6f, 100L });
    originalData.add(data1);
    StreamEvent data2 = new StreamEvent(0, 0, 3);
    data2.setOutputData(new Object[] { "IBM", 75.6f, 100L });
    originalData.add(data2);
    inMemoryTable.add(originalData);
}
Also used: InMemoryTable (io.siddhi.core.table.InMemoryTable), ComplexEventChunk (io.siddhi.core.event.ComplexEventChunk), Attribute (io.siddhi.query.api.definition.Attribute), StreamEvent (io.siddhi.core.event.stream.StreamEvent), MetaStreamEvent (io.siddhi.core.event.stream.MetaStreamEvent), StreamEventCloner (io.siddhi.core.event.stream.StreamEventCloner), TableDefinition (io.siddhi.query.api.definition.TableDefinition), Annotation (io.siddhi.query.api.annotation.Annotation)
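
For reference, a plausible table definition feeding this init(), with three attributes matching the preloaded Object[] rows; the table name, attribute names, and store type are assumptions.

// Sketch: attribute types line up with the preloaded rows ("WSO2", 55.6f, 100L).
TableDefinition stockTable = TableDefinition.id("StockTable")
        .attribute("symbol", Attribute.Type.STRING)
        .attribute("price", Attribute.Type.FLOAT)
        .attribute("volume", Attribute.Type.LONG)
        .annotation(Annotation.annotation("Store").element("type", "testStoreForCachePreLoading"));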

Aggregations

Annotation (io.siddhi.query.api.annotation.Annotation): 21 usages
Attribute (io.siddhi.query.api.definition.Attribute): 10 usages
SiddhiAppCreationException (io.siddhi.core.exception.SiddhiAppCreationException): 8 usages
Element (io.siddhi.query.api.annotation.Element): 8 usages
ArrayList (java.util.ArrayList): 7 usages
MetaStreamEvent (io.siddhi.core.event.stream.MetaStreamEvent): 6 usages
TableDefinition (io.siddhi.query.api.definition.TableDefinition): 6 usages
HashMap (java.util.HashMap): 6 usages
StreamEventCloner (io.siddhi.core.event.stream.StreamEventCloner): 5 usages
InMemoryTable (io.siddhi.core.table.InMemoryTable): 4 usages
Table (io.siddhi.core.table.Table): 4 usages
ConfigReader (io.siddhi.core.util.config.ConfigReader): 4 usages
OutputAttribute (io.siddhi.query.api.execution.query.selection.OutputAttribute): 4 usages
Extension (io.siddhi.query.api.extension.Extension): 4 usages
Map (java.util.Map): 4 usages
SiddhiAppContext (io.siddhi.core.config.SiddhiAppContext): 3 usages
StreamEventFactory (io.siddhi.core.event.stream.StreamEventFactory): 3 usages
VariableExpressionExecutor (io.siddhi.core.executor.VariableExpressionExecutor): 3 usages
StreamDefinition (io.siddhi.query.api.definition.StreamDefinition): 3 usages
SiddhiQueryContext (io.siddhi.core.config.SiddhiQueryContext): 2 usages