Search in sources :

Example 1 with Instance

use of org.wso2.carbon.bpel.core.ode.integration.jmx.Instance in project siddhi by wso2.

the class AggregationParser method parse.

public static AggregationRuntime parse(AggregationDefinition aggregationDefinition, SiddhiAppContext siddhiAppContext, Map<String, AbstractDefinition> streamDefinitionMap, Map<String, AbstractDefinition> tableDefinitionMap, Map<String, AbstractDefinition> windowDefinitionMap, Map<String, AbstractDefinition> aggregationDefinitionMap, Map<String, Table> tableMap, Map<String, Window> windowMap, Map<String, AggregationRuntime> aggregationMap, SiddhiAppRuntimeBuilder siddhiAppRuntimeBuilder) {
    if (aggregationDefinition == null) {
        throw new SiddhiAppCreationException("AggregationDefinition instance is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'");
    }
    if (aggregationDefinition.getTimePeriod() == null) {
        throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s timePeriod is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
    }
    if (aggregationDefinition.getSelector() == null) {
        throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s selection is not defined. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
    }
    if (streamDefinitionMap.get(aggregationDefinition.getBasicSingleInputStream().getStreamId()) == null) {
        throw new SiddhiAppCreationException("Stream " + aggregationDefinition.getBasicSingleInputStream().getStreamId() + " has not been defined");
    }
    try {
        List<VariableExpressionExecutor> incomingVariableExpressionExecutors = new ArrayList<>();
        String aggregatorName = aggregationDefinition.getId();
        StreamRuntime streamRuntime = InputStreamParser.parse(aggregationDefinition.getBasicSingleInputStream(), siddhiAppContext, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap, tableMap, windowMap, aggregationMap, incomingVariableExpressionExecutors, null, false, aggregatorName);
        // Get original meta for later use.
        MetaStreamEvent incomingMetaStreamEvent = (MetaStreamEvent) streamRuntime.getMetaComplexEvent();
        // Create new meta stream event.
        // This must hold the timestamp, group by attributes (if given) and the incremental attributes, in
        // onAfterWindowData array
        // Example format: AGG_TIMESTAMP, groupByAttribute1, groupByAttribute2, AGG_incAttribute1, AGG_incAttribute2
        // AGG_incAttribute1, AGG_incAttribute2 would have the same attribute names as in
        // finalListOfIncrementalAttributes
        // To enter data as onAfterWindowData
        incomingMetaStreamEvent.initializeAfterWindowData();
        List<ExpressionExecutor> incomingExpressionExecutors = new ArrayList<>();
        List<IncrementalAttributeAggregator> incrementalAttributeAggregators = new ArrayList<>();
        List<Variable> groupByVariableList = aggregationDefinition.getSelector().getGroupByList();
        boolean isProcessingOnExternalTime = aggregationDefinition.getAggregateAttribute() != null;
        // Expressions to get final aggregate outputs, e.g. avg = sum / count
        List<Expression> outputExpressions = new ArrayList<>();
        // Expression executors to get final aggregate outputs, e.g. avg = sum / count
        List<ExpressionExecutor> outputExpressionExecutors = new ArrayList<>();
        populateIncomingAggregatorsAndExecutors(aggregationDefinition, siddhiAppContext, tableMap, incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators, groupByVariableList, outputExpressions);
        int baseAggregatorBeginIndex = incomingMetaStreamEvent.getOutputData().size();
        List<Expression> finalBaseAggregators = getFinalBaseAggregators(siddhiAppContext, tableMap, incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators);
        StreamDefinition incomingOutputStreamDefinition = StreamDefinition.id("");
        incomingOutputStreamDefinition.setQueryContextStartIndex(aggregationDefinition.getQueryContextStartIndex());
        incomingOutputStreamDefinition.setQueryContextEndIndex(aggregationDefinition.getQueryContextEndIndex());
        MetaStreamEvent processedMetaStreamEvent = new MetaStreamEvent();
        for (Attribute attribute : incomingMetaStreamEvent.getOutputData()) {
            incomingOutputStreamDefinition.attribute(attribute.getName(), attribute.getType());
            processedMetaStreamEvent.addOutputData(attribute);
        }
        incomingMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
        processedMetaStreamEvent.addInputDefinition(incomingOutputStreamDefinition);
        processedMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
        // Executors of processing meta
        List<VariableExpressionExecutor> processVariableExpressionExecutors = new ArrayList<>();
        boolean groupBy = aggregationDefinition.getSelector().getGroupByList().size() != 0;
        List<ExpressionExecutor> processExpressionExecutors = constructProcessExpressionExecutors(siddhiAppContext, tableMap, aggregatorName, baseAggregatorBeginIndex, finalBaseAggregators, incomingOutputStreamDefinition, processedMetaStreamEvent, processVariableExpressionExecutors, groupBy);
        outputExpressionExecutors.addAll(outputExpressions.stream().map(expression -> ExpressionParser.parseExpression(expression, processedMetaStreamEvent, 0, tableMap, processVariableExpressionExecutors, siddhiAppContext, groupBy, 0, aggregatorName)).collect(Collectors.toList()));
        // Create group by key generator
        GroupByKeyGenerator groupByKeyGenerator = null;
        if (groupBy) {
            groupByKeyGenerator = new GroupByKeyGenerator(groupByVariableList, processedMetaStreamEvent, SiddhiConstants.UNKNOWN_STATE, tableMap, processVariableExpressionExecutors, siddhiAppContext, aggregatorName);
        }
        // Create new scheduler
        EntryValveExecutor entryValveExecutor = new EntryValveExecutor(siddhiAppContext);
        LockWrapper lockWrapper = new LockWrapper(aggregatorName);
        lockWrapper.setLock(new ReentrantLock());
        Scheduler scheduler = SchedulerParser.parse(siddhiAppContext.getScheduledExecutorService(), entryValveExecutor, siddhiAppContext);
        scheduler.init(lockWrapper, aggregatorName);
        scheduler.setStreamEventPool(new StreamEventPool(processedMetaStreamEvent, 10));
        QueryParserHelper.reduceMetaComplexEvent(incomingMetaStreamEvent);
        QueryParserHelper.reduceMetaComplexEvent(processedMetaStreamEvent);
        QueryParserHelper.updateVariablePosition(incomingMetaStreamEvent, incomingVariableExpressionExecutors);
        QueryParserHelper.updateVariablePosition(processedMetaStreamEvent, processVariableExpressionExecutors);
        List<TimePeriod.Duration> incrementalDurations = getSortedPeriods(aggregationDefinition.getTimePeriod());
        Map<TimePeriod.Duration, Table> aggregationTables = initDefaultTables(aggregatorName, incrementalDurations, processedMetaStreamEvent.getOutputStreamDefinition(), siddhiAppRuntimeBuilder, aggregationDefinition.getAnnotations(), groupByVariableList);
        int bufferSize = 0;
        Element element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_BUFFER_SIZE, null, aggregationDefinition.getAnnotations());
        if (element != null) {
            try {
                bufferSize = Integer.parseInt(element.getValue());
            } catch (NumberFormatException e) {
                throw new SiddhiAppCreationException(e.getMessage() + ": BufferSize must be an integer");
            }
        }
        if (bufferSize > 0) {
            TimePeriod.Duration rootDuration = incrementalDurations.get(0);
            if (rootDuration == TimePeriod.Duration.MONTHS || rootDuration == TimePeriod.Duration.YEARS) {
                throw new SiddhiAppCreationException("A buffer size greater than 0 can be provided, only when the " + "first duration value is seconds, minutes, hours or days");
            }
            if (!isProcessingOnExternalTime) {
                // Buffer size is used to process out of order events. However, events would never be out of
                // order if they are processed based on event arrival time.
                throw new SiddhiAppCreationException("Buffer size cannot be specified when events are aggregated " + "based on event arrival time.");
            }
        } else if (bufferSize < 0) {
            throw new SiddhiAppCreationException("Expected a positive integer as the buffer size, but found " + bufferSize + " as the provided value");
        }
        boolean ignoreEventsOlderThanBuffer = false;
        element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_IGNORE_EVENTS_OLDER_THAN_BUFFER, null, aggregationDefinition.getAnnotations());
        if (element != null) {
            if (element.getValue().equalsIgnoreCase("true")) {
                ignoreEventsOlderThanBuffer = true;
            } else if (!element.getValue().equalsIgnoreCase("false")) {
                throw new SiddhiAppCreationException("IgnoreEventsOlderThanBuffer value must " + "be true or false");
            }
        }
        Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap = buildIncrementalExecutors(isProcessingOnExternalTime, processedMetaStreamEvent, processExpressionExecutors, groupByKeyGenerator, bufferSize, ignoreEventsOlderThanBuffer, incrementalDurations, aggregationTables, siddhiAppContext, aggregatorName);
        // Recreate in-memory data from tables
        RecreateInMemoryData recreateInMemoryData = new RecreateInMemoryData(incrementalDurations, aggregationTables, incrementalExecutorMap, siddhiAppContext, processedMetaStreamEvent, tableMap, windowMap, aggregationMap);
        IncrementalExecutor rootIncrementalExecutor = incrementalExecutorMap.get(incrementalDurations.get(0));
        rootIncrementalExecutor.setScheduler(scheduler);
        // Connect entry valve to root incremental executor
        entryValveExecutor.setNextExecutor(rootIncrementalExecutor);
        QueryParserHelper.initStreamRuntime(streamRuntime, incomingMetaStreamEvent, lockWrapper, aggregatorName);
        LatencyTracker latencyTrackerFind = null;
        LatencyTracker latencyTrackerInsert = null;
        ThroughputTracker throughputTrackerFind = null;
        ThroughputTracker throughputTrackerInsert = null;
        if (siddhiAppContext.getStatisticsManager() != null) {
            latencyTrackerFind = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_FIND);
            latencyTrackerInsert = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_INSERT);
            throughputTrackerFind = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_FIND);
            throughputTrackerInsert = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_INSERT);
        }
        streamRuntime.setCommonProcessor(new IncrementalAggregationProcessor(rootIncrementalExecutor, incomingExpressionExecutors, processedMetaStreamEvent, latencyTrackerInsert, throughputTrackerInsert, siddhiAppContext));
        List<ExpressionExecutor> baseExecutors = cloneExpressionExecutors(processExpressionExecutors);
        ExpressionExecutor timestampExecutor = baseExecutors.remove(0);
        return new AggregationRuntime(aggregationDefinition, incrementalExecutorMap, aggregationTables, ((SingleStreamRuntime) streamRuntime), entryValveExecutor, incrementalDurations, siddhiAppContext, baseExecutors, timestampExecutor, processedMetaStreamEvent, outputExpressionExecutors, latencyTrackerFind, throughputTrackerFind, recreateInMemoryData);
    } catch (Throwable t) {
        ExceptionUtil.populateQueryContext(t, aggregationDefinition, siddhiAppContext);
        throw t;
    }
}
Also used : Variable(org.wso2.siddhi.query.api.expression.Variable) OutputAttribute(org.wso2.siddhi.query.api.execution.query.selection.OutputAttribute) Attribute(org.wso2.siddhi.query.api.definition.Attribute) Scheduler(org.wso2.siddhi.core.util.Scheduler) GroupByKeyGenerator(org.wso2.siddhi.core.query.selector.GroupByKeyGenerator) Element(org.wso2.siddhi.query.api.annotation.Element) ArrayList(java.util.ArrayList) IncrementalExecutor(org.wso2.siddhi.core.aggregation.IncrementalExecutor) StreamEventPool(org.wso2.siddhi.core.event.stream.StreamEventPool) StreamRuntime(org.wso2.siddhi.core.query.input.stream.StreamRuntime) SingleStreamRuntime(org.wso2.siddhi.core.query.input.stream.single.SingleStreamRuntime) IncrementalAggregationProcessor(org.wso2.siddhi.core.aggregation.IncrementalAggregationProcessor) AggregationRuntime(org.wso2.siddhi.core.aggregation.AggregationRuntime) ReentrantLock(java.util.concurrent.locks.ReentrantLock) ThroughputTracker(org.wso2.siddhi.core.util.statistics.ThroughputTracker) Table(org.wso2.siddhi.core.table.Table) VariableExpressionExecutor(org.wso2.siddhi.core.executor.VariableExpressionExecutor) ExpressionExecutor(org.wso2.siddhi.core.executor.ExpressionExecutor) StreamDefinition(org.wso2.siddhi.query.api.definition.StreamDefinition) SiddhiAppCreationException(org.wso2.siddhi.core.exception.SiddhiAppCreationException) TimePeriod(org.wso2.siddhi.query.api.aggregation.TimePeriod) EntryValveExecutor(org.wso2.siddhi.core.query.input.stream.single.EntryValveExecutor) LockWrapper(org.wso2.siddhi.core.util.lock.LockWrapper) Expression(org.wso2.siddhi.query.api.expression.Expression) IncrementalAttributeAggregator(org.wso2.siddhi.core.query.selector.attribute.aggregator.incremental.IncrementalAttributeAggregator) RecreateInMemoryData(org.wso2.siddhi.core.aggregation.RecreateInMemoryData) LatencyTracker(org.wso2.siddhi.core.util.statistics.LatencyTracker) MetaStreamEvent(org.wso2.siddhi.core.event.stream.MetaStreamEvent)
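
For orientation, here is a hedged, minimal sketch of how this parse method is normally reached: a Siddhi app string containing a define aggregation statement is handed to SiddhiManager, and AggregationParser.parse is invoked while the app runtime is built for each aggregation definition found. The stream name, aggregation name, and SiddhiQL below are illustrative only.

import org.wso2.siddhi.core.SiddhiAppRuntime;
import org.wso2.siddhi.core.SiddhiManager;

public class AggregationUsageSketch {
    public static void main(String[] args) {
        // Illustrative SiddhiQL; the 'define aggregation' statement is what leads to
        // AggregationParser.parse(...) being called when the app runtime is built.
        String app = "define stream TradeStream (symbol string, price double, timestamp long); " +
                "define aggregation TradeAggregation " +
                "from TradeStream " +
                "select symbol, avg(price) as avgPrice, sum(price) as totalPrice " +
                "group by symbol " +
                "aggregate by timestamp every sec ... year;";
        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
        // ... send events through runtime.getInputHandler("TradeStream") ...
        runtime.shutdown();
        siddhiManager.shutdown();
    }
}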

Example 2 with Instance

use of org.wso2.carbon.bpel.core.ode.integration.jmx.Instance in project siddhi by wso2.

the class JoinInputStreamParser method insertJoinProcessorsAndGetFindable.

private static FindableProcessor insertJoinProcessorsAndGetFindable(JoinProcessor preJoinProcessor, JoinProcessor postJoinProcessor, SingleStreamRuntime streamRuntime, SiddhiAppContext siddhiAppContext, boolean outputExpectsExpiredEvents, String queryName, InputStream inputStream) {
    Processor lastProcessor = streamRuntime.getProcessorChain();
    Processor prevLastProcessor = null;
    if (lastProcessor != null) {
        while (lastProcessor.getNextProcessor() != null) {
            prevLastProcessor = lastProcessor;
            lastProcessor = lastProcessor.getNextProcessor();
        }
    }
    if (lastProcessor == null) {
        try {
            WindowProcessor windowProcessor = new LengthWindowProcessor();
            ExpressionExecutor[] expressionExecutors = new ExpressionExecutor[1];
            expressionExecutors[0] = new ConstantExpressionExecutor(0, Attribute.Type.INT);
            ConfigReader configReader = siddhiAppContext.getSiddhiContext().getConfigManager().generateConfigReader("", "length");
            windowProcessor.initProcessor(((MetaStreamEvent) streamRuntime.getMetaComplexEvent()).getLastInputDefinition(), expressionExecutors, configReader, siddhiAppContext, outputExpectsExpiredEvents, queryName, inputStream);
            lastProcessor = windowProcessor;
        } catch (Throwable t) {
            throw new SiddhiAppCreationException(t);
        }
    }
    if (lastProcessor instanceof FindableProcessor) {
        if (prevLastProcessor != null) {
            prevLastProcessor.setNextProcessor(preJoinProcessor);
        } else {
            streamRuntime.setProcessorChain(preJoinProcessor);
        }
        preJoinProcessor.setNextProcessor(lastProcessor);
        lastProcessor.setNextProcessor(postJoinProcessor);
        return (FindableProcessor) lastProcessor;
    } else {
        throw new OperationNotSupportedException("Stream " + ((MetaStreamEvent) streamRuntime.getMetaComplexEvent()).getLastInputDefinition().getId() + "'s last processor " + lastProcessor.getClass().getCanonicalName() + " is not an instance of " + FindableProcessor.class.getCanonicalName() + " hence join cannot proceed");
    }
}
Also used : OperationNotSupportedException(org.wso2.siddhi.core.exception.OperationNotSupportedException) LengthWindowProcessor(org.wso2.siddhi.core.query.processor.stream.window.LengthWindowProcessor) FindableProcessor(org.wso2.siddhi.core.query.processor.stream.window.FindableProcessor) WindowWindowProcessor(org.wso2.siddhi.core.query.processor.stream.window.WindowWindowProcessor) AggregateWindowProcessor(org.wso2.siddhi.core.query.processor.stream.window.AggregateWindowProcessor) TableWindowProcessor(org.wso2.siddhi.core.query.processor.stream.window.TableWindowProcessor) Processor(org.wso2.siddhi.core.query.processor.Processor) WindowProcessor(org.wso2.siddhi.core.query.processor.stream.window.WindowProcessor) JoinProcessor(org.wso2.siddhi.core.query.input.stream.join.JoinProcessor) VariableExpressionExecutor(org.wso2.siddhi.core.executor.VariableExpressionExecutor) ExpressionExecutor(org.wso2.siddhi.core.executor.ExpressionExecutor) ConstantExpressionExecutor(org.wso2.siddhi.core.executor.ConstantExpressionExecutor) SiddhiAppCreationException(org.wso2.siddhi.core.exception.SiddhiAppCreationException) ConfigReader(org.wso2.siddhi.core.util.config.ConfigReader) MetaStreamEvent(org.wso2.siddhi.core.event.stream.MetaStreamEvent)
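
A hedged sketch of the kind of join query that exercises this code path: each side of the join is parsed into a SingleStreamRuntime, and insertJoinProcessorsAndGetFindable wires the pre/post join processors around the last processor of that chain. If a side were written without an explicit #window, the lastProcessor == null branch above would install the zero-length LengthWindowProcessor for it. Stream names and the query below are illustrative; exact join semantics depend on the Siddhi version.

import org.wso2.siddhi.core.SiddhiAppRuntime;
import org.wso2.siddhi.core.SiddhiManager;

public class JoinUsageSketch {
    public static void main(String[] args) {
        // Illustrative SiddhiQL join; building this app routes each join side through
        // JoinInputStreamParser and, from there, insertJoinProcessorsAndGetFindable(...).
        String app = "define stream StockStream (symbol string, price double); " +
                "define stream CheckStream (symbol string); " +
                "from CheckStream#window.length(1) join StockStream#window.length(10) " +
                "on CheckStream.symbol == StockStream.symbol " +
                "select StockStream.symbol, StockStream.price " +
                "insert into OutputStream;";
        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
        runtime.shutdown();
        siddhiManager.shutdown();
    }
}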

Example 3 with Instance

use of org.wso2.carbon.bpel.core.ode.integration.jmx.Instance in project carbon-apimgt by wso2.

the class ApplicationDAOImpl method addApplication.

/**
 * Add a new instance of an Application
 *
 * @param application The {@link Application} object to be added
 * @throws APIMgtDAOException   If failed to add application.
 */
@Override
public void addApplication(Application application) throws APIMgtDAOException {
    final String addAppQuery = "INSERT INTO AM_APPLICATION (UUID, NAME, APPLICATION_POLICY_ID, " + "DESCRIPTION, APPLICATION_STATUS, CREATED_BY, CREATED_TIME, UPDATED_BY, " + "LAST_UPDATED_TIME) VALUES (?, ?,?,?,?,?,?,?,?)";
    try (Connection conn = DAOUtil.getConnection()) {
        conn.setAutoCommit(false);
        try (PreparedStatement ps = conn.prepareStatement(addAppQuery)) {
            ps.setString(1, application.getId());
            ps.setString(2, application.getName());
            ps.setString(3, application.getPolicy().getUuid());
            ps.setString(4, application.getDescription());
            if (APIMgtConstants.DEFAULT_APPLICATION_NAME.equals(application.getName())) {
                ps.setString(5, APIMgtConstants.ApplicationStatus.APPLICATION_APPROVED);
            } else {
                ps.setString(5, APIMgtConstants.ApplicationStatus.APPLICATION_CREATED);
            }
            ps.setString(6, application.getCreatedUser());
            ps.setTimestamp(7, Timestamp.valueOf(application.getCreatedTime()));
            ps.setString(8, application.getCreatedUser());
            ps.setTimestamp(9, Timestamp.valueOf(application.getCreatedTime()));
            ps.executeUpdate();
            addApplicationPermission(conn, application.getPermissionMap(), application.getId());
            conn.commit();
        } catch (SQLException ex) {
            conn.rollback();
            throw new APIMgtDAOException(DAOUtil.DAO_ERROR_PREFIX + "adding application: " + application.getId(), ex);
        } finally {
            conn.setAutoCommit(DAOUtil.isAutoCommit());
        }
    } catch (SQLException ex) {
        throw new APIMgtDAOException(DAOUtil.DAO_ERROR_PREFIX + "adding application: " + application.getId(), ex);
    }
}
Also used : APIMgtDAOException(org.wso2.carbon.apimgt.core.exception.APIMgtDAOException) SQLException(java.sql.SQLException) Connection(java.sql.Connection) PreparedStatement(java.sql.PreparedStatement)
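
The method above follows the standard JDBC manual-transaction shape: disable auto-commit, execute the statements, commit on success, roll back on SQLException, and restore the auto-commit flag before the pooled connection is returned. A minimal, self-contained sketch of that pattern is below; the DataSource, table, and column names are hypothetical stand-ins for DAOUtil and the AM_APPLICATION schema.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import javax.sql.DataSource;

public final class TransactionalInsertSketch {

    private final DataSource dataSource; // hypothetical stand-in for DAOUtil's connection pool

    public TransactionalInsertSketch(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    public void insert(String uuid, String name) throws SQLException {
        final String sql = "INSERT INTO SAMPLE_TABLE (UUID, NAME) VALUES (?, ?)";
        try (Connection conn = dataSource.getConnection()) {
            boolean previousAutoCommit = conn.getAutoCommit();
            // Open a transaction scope so all statements commit or roll back together.
            conn.setAutoCommit(false);
            try (PreparedStatement ps = conn.prepareStatement(sql)) {
                ps.setString(1, uuid);
                ps.setString(2, name);
                ps.executeUpdate();
                // Commit only after every statement in the unit of work has succeeded.
                conn.commit();
            } catch (SQLException ex) {
                // Undo any partial work, then surface the failure to the caller.
                conn.rollback();
                throw ex;
            } finally {
                // Restore the connection's original mode before it goes back to the pool.
                conn.setAutoCommit(previousAutoCommit);
            }
        }
    }
}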

Example 4 with Instance

use of org.wso2.carbon.bpel.core.ode.integration.jmx.Instance in project carbon-apimgt by wso2.

the class ApplicationDAOImpl method getApplication.

/**
 * Retrieve a given instance of an Application
 *
 * @param appId The UUID that uniquely identifies an Application
 * @return valid {@link Application} object or null
 * @throws APIMgtDAOException   If failed to retrieve application.
 */
@Override
public Application getApplication(String appId) throws APIMgtDAOException {
    final String completeGetAppQuery = GET_APPS_WITH_POLICY_QUERY + " AND APPLICATION.UUID = ?";
    Application application;
    try (Connection conn = DAOUtil.getConnection();
        PreparedStatement ps = conn.prepareStatement(completeGetAppQuery)) {
        ps.setString(1, appId);
        try (ResultSet rs = ps.executeQuery()) {
            application = this.createApplicationFromResultSet(rs);
            if (application == null) {
                throw new APIMgtDAOException("Application is not available in the system.", ExceptionCodes.APPLICATION_NOT_FOUND);
            }
            application.setApplicationKeys(getApplicationKeys(appId));
        }
    } catch (SQLException ex) {
        throw new APIMgtDAOException(DAOUtil.DAO_ERROR_PREFIX + "getting application: " + appId, ex);
    }
    return application;
}
Also used : APIMgtDAOException(org.wso2.carbon.apimgt.core.exception.APIMgtDAOException) SQLException(java.sql.SQLException) Connection(java.sql.Connection) ResultSet(java.sql.ResultSet) PreparedStatement(java.sql.PreparedStatement) Application(org.wso2.carbon.apimgt.core.models.Application)
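
getApplication relies on the same try-with-resources layering: the Connection and PreparedStatement are opened in the resource header, the ResultSet in a nested block, and a missing row is converted into a domain exception rather than returned as null. A generic sketch of that single-row lookup shape follows; the table, column, and error message are illustrative, not the actual AM_APPLICATION schema.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.sql.DataSource;

public final class SingleRowLookupSketch {

    private final DataSource dataSource; // hypothetical stand-in for DAOUtil's connection pool

    public SingleRowLookupSketch(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    // Returns the NAME column for the given UUID; throws if no matching row exists.
    public String findNameById(String uuid) throws SQLException {
        final String sql = "SELECT NAME FROM SAMPLE_TABLE WHERE UUID = ?";
        try (Connection conn = dataSource.getConnection();
             PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, uuid);
            try (ResultSet rs = ps.executeQuery()) {
                if (!rs.next()) {
                    // Mirror the DAO above: a missing row becomes an exception, not a null return.
                    throw new SQLException("No row found for UUID: " + uuid);
                }
                return rs.getString("NAME");
            }
        }
    }
}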

Example 5 with Instance

use of org.wso2.carbon.bpel.core.ode.integration.jmx.Instance in project carbon-apimgt by wso2.

the class ApplicationDAOImpl method getApplicationByName.

/**
 * Retrieve a given instance of an Application
 *
 * @param appName Name of the Application
 * @param ownerId ID of the application owner.
 * @return valid {@link Application} object or null
 * @throws APIMgtDAOException   If failed to retrieve application.
 */
@Override
public Application getApplicationByName(String appName, String ownerId) throws APIMgtDAOException {
    final String completeGetAppQuery = GET_APPS_WITH_POLICY_QUERY + " AND APPLICATION.NAME = ? AND APPLICATION" + ".CREATED_BY = ?";
    Application application;
    try (Connection conn = DAOUtil.getConnection();
        PreparedStatement ps = conn.prepareStatement(completeGetAppQuery)) {
        ps.setString(1, appName);
        ps.setString(2, ownerId);
        try (ResultSet rs = ps.executeQuery()) {
            application = this.createApplicationFromResultSet(rs);
        }
        if (application == null) {
            throw new APIMgtDAOException("Application is not available in the system.", ExceptionCodes.APPLICATION_NOT_FOUND);
        }
    } catch (SQLException ex) {
        throw new APIMgtDAOException(DAOUtil.DAO_ERROR_PREFIX + "getting application(name: " + appName + ", " + "ownerId: " + ownerId + ")", ex);
    }
    return application;
}
Also used : APIMgtDAOException(org.wso2.carbon.apimgt.core.exception.APIMgtDAOException) SQLException(java.sql.SQLException) Connection(java.sql.Connection) ResultSet(java.sql.ResultSet) PreparedStatement(java.sql.PreparedStatement) Application(org.wso2.carbon.apimgt.core.models.Application)

Aggregations

ArrayList (java.util.ArrayList): 28
Test (org.junit.Test): 23
Response (javax.ws.rs.core.Response): 22
PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest): 21
APIManagerFactory (org.wso2.carbon.apimgt.core.impl.APIManagerFactory): 20
HashMap (java.util.HashMap): 15
Path (javax.ws.rs.Path): 15
RuntimeService (org.activiti.engine.RuntimeService): 15
HistoricProcessInstance (org.activiti.engine.history.HistoricProcessInstance): 15
InstanceManagementException (org.wso2.carbon.bpel.skeleton.ode.integration.mgt.services.InstanceManagementException): 14
Produces (javax.ws.rs.Produces): 13
RestResponseFactory (org.wso2.carbon.bpmn.rest.common.RestResponseFactory): 13
IOException (java.io.IOException): 12
APIMgtAdminServiceImpl (org.wso2.carbon.apimgt.core.impl.APIMgtAdminServiceImpl): 12
GET (javax.ws.rs.GET): 11
APIManagementException (org.wso2.carbon.apimgt.core.exception.APIManagementException): 11
ActivitiObjectNotFoundException (org.activiti.engine.ActivitiObjectNotFoundException): 10
ProcessInstance (org.activiti.engine.runtime.ProcessInstance): 9
APIMgtDAOException (org.wso2.carbon.apimgt.core.exception.APIMgtDAOException): 9
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 8