
Example 1 with Names

use of org.wso2.ballerinalang.compiler.util.Names in project siddhi by wso2.

the class SumIncrementalAttributeAggregator method init.

@Override
public void init(String attributeName, Attribute.Type attributeType) {
    Attribute sum;
    Expression sumInitialValue;
    if (attributeName == null) {
        throw new SiddhiAppCreationException("Sum incremental attribute aggregation cannot be executed " + "when no parameters are given");
    }
    if (attributeType.equals(Attribute.Type.FLOAT) || attributeType.equals(Attribute.Type.DOUBLE)) {
        sum = new Attribute("AGG_SUM_".concat(attributeName), Attribute.Type.DOUBLE);
        sumInitialValue = Expression.function("convert", Expression.variable(attributeName), Expression.value("double"));
        returnType = Attribute.Type.DOUBLE;
    } else if (attributeType.equals(Attribute.Type.INT) || attributeType.equals(Attribute.Type.LONG)) {
        sum = new Attribute("AGG_SUM_".concat(attributeName), Attribute.Type.LONG);
        sumInitialValue = Expression.function("convert", Expression.variable(attributeName), Expression.value("long"));
        returnType = Attribute.Type.LONG;
    } else {
        throw new SiddhiAppRuntimeException("Sum aggregation cannot be executed on attribute type " + attributeType.toString());
    }
    this.baseAttributes = new Attribute[] { sum };
    // Initial values are paired positionally with the base attributes above
    this.baseAttributesInitialValues = new Expression[] { sumInitialValue };
    assert baseAttributes.length == baseAttributesInitialValues.length;
}
Also used: ReturnAttribute (org.wso2.siddhi.annotation.ReturnAttribute), Attribute (org.wso2.siddhi.query.api.definition.Attribute), Expression (org.wso2.siddhi.query.api.expression.Expression), SiddhiAppCreationException (org.wso2.siddhi.core.exception.SiddhiAppCreationException), SiddhiAppRuntimeException (org.wso2.siddhi.core.exception.SiddhiAppRuntimeException)
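
The init() logic above boils down to a type-widening rule: FLOAT and DOUBLE inputs accumulate into a DOUBLE sum, INT and LONG inputs into a LONG sum, and anything else is rejected. Below is a minimal sketch isolating that rule; widenForSum is a hypothetical helper for illustration, not part of the Siddhi API.

import org.wso2.siddhi.query.api.definition.Attribute;

public final class SumTypeWideningSketch {

    // Hypothetical helper mirroring the branching in init():
    // FLOAT/DOUBLE widen to a DOUBLE sum, INT/LONG widen to a LONG sum.
    static Attribute.Type widenForSum(Attribute.Type attributeType) {
        switch (attributeType) {
            case FLOAT:
            case DOUBLE:
                return Attribute.Type.DOUBLE;
            case INT:
            case LONG:
                return Attribute.Type.LONG;
            default:
                throw new IllegalArgumentException(
                        "Sum aggregation cannot be executed on attribute type " + attributeType);
        }
    }

    public static void main(String[] args) {
        System.out.println(widenForSum(Attribute.Type.FLOAT)); // DOUBLE
        System.out.println(widenForSum(Attribute.Type.INT));   // LONG
    }
}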

Example 2 with Names

use of org.wso2.ballerinalang.compiler.util.Names in project siddhi by wso2.

the class AggregationParser method parse.

public static AggregationRuntime parse(AggregationDefinition aggregationDefinition, SiddhiAppContext siddhiAppContext, Map<String, AbstractDefinition> streamDefinitionMap, Map<String, AbstractDefinition> tableDefinitionMap, Map<String, AbstractDefinition> windowDefinitionMap, Map<String, AbstractDefinition> aggregationDefinitionMap, Map<String, Table> tableMap, Map<String, Window> windowMap, Map<String, AggregationRuntime> aggregationMap, SiddhiAppRuntimeBuilder siddhiAppRuntimeBuilder) {
    if (aggregationDefinition == null) {
        throw new SiddhiAppCreationException("AggregationDefinition instance is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'");
    }
    if (aggregationDefinition.getTimePeriod() == null) {
        throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s timePeriod is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
    }
    if (aggregationDefinition.getSelector() == null) {
        throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s selection is not defined. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
    }
    if (streamDefinitionMap.get(aggregationDefinition.getBasicSingleInputStream().getStreamId()) == null) {
        throw new SiddhiAppCreationException("Stream " + aggregationDefinition.getBasicSingleInputStream().getStreamId() + " has not been defined");
    }
    try {
        List<VariableExpressionExecutor> incomingVariableExpressionExecutors = new ArrayList<>();
        String aggregatorName = aggregationDefinition.getId();
        StreamRuntime streamRuntime = InputStreamParser.parse(aggregationDefinition.getBasicSingleInputStream(), siddhiAppContext, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap, tableMap, windowMap, aggregationMap, incomingVariableExpressionExecutors, null, false, aggregatorName);
        // Get original meta for later use.
        MetaStreamEvent incomingMetaStreamEvent = (MetaStreamEvent) streamRuntime.getMetaComplexEvent();
        // Create new meta stream event.
        // This must hold the timestamp, the group-by attributes (if given) and the incremental
        // attributes in the onAfterWindowData array.
        // Example format: AGG_TIMESTAMP, groupByAttribute1, groupByAttribute2, AGG_incAttribute1, AGG_incAttribute2
        // (AGG_incAttribute1 and AGG_incAttribute2 carry the same attribute names as in
        // finalListOfIncrementalAttributes.)
        incomingMetaStreamEvent.initializeAfterWindowData(); // enables entering data as onAfterWindowData
        List<ExpressionExecutor> incomingExpressionExecutors = new ArrayList<>();
        List<IncrementalAttributeAggregator> incrementalAttributeAggregators = new ArrayList<>();
        List<Variable> groupByVariableList = aggregationDefinition.getSelector().getGroupByList();
        boolean isProcessingOnExternalTime = aggregationDefinition.getAggregateAttribute() != null;
        // Expressions to get final aggregate outputs, e.g. avg = sum / count
        List<Expression> outputExpressions = new ArrayList<>();
        // Expression executors to get final aggregate outputs, e.g. avg = sum / count
        List<ExpressionExecutor> outputExpressionExecutors = new ArrayList<>();
        populateIncomingAggregatorsAndExecutors(aggregationDefinition, siddhiAppContext, tableMap, incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators, groupByVariableList, outputExpressions);
        int baseAggregatorBeginIndex = incomingMetaStreamEvent.getOutputData().size();
        List<Expression> finalBaseAggregators = getFinalBaseAggregators(siddhiAppContext, tableMap, incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators);
        StreamDefinition incomingOutputStreamDefinition = StreamDefinition.id("");
        incomingOutputStreamDefinition.setQueryContextStartIndex(aggregationDefinition.getQueryContextStartIndex());
        incomingOutputStreamDefinition.setQueryContextEndIndex(aggregationDefinition.getQueryContextEndIndex());
        MetaStreamEvent processedMetaStreamEvent = new MetaStreamEvent();
        for (Attribute attribute : incomingMetaStreamEvent.getOutputData()) {
            incomingOutputStreamDefinition.attribute(attribute.getName(), attribute.getType());
            processedMetaStreamEvent.addOutputData(attribute);
        }
        incomingMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
        processedMetaStreamEvent.addInputDefinition(incomingOutputStreamDefinition);
        processedMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
        // Executors of processing meta
        List<VariableExpressionExecutor> processVariableExpressionExecutors = new ArrayList<>();
        boolean groupBy = !aggregationDefinition.getSelector().getGroupByList().isEmpty();
        List<ExpressionExecutor> processExpressionExecutors = constructProcessExpressionExecutors(siddhiAppContext, tableMap, aggregatorName, baseAggregatorBeginIndex, finalBaseAggregators, incomingOutputStreamDefinition, processedMetaStreamEvent, processVariableExpressionExecutors, groupBy);
        outputExpressionExecutors.addAll(outputExpressions.stream().map(expression -> ExpressionParser.parseExpression(expression, processedMetaStreamEvent, 0, tableMap, processVariableExpressionExecutors, siddhiAppContext, groupBy, 0, aggregatorName)).collect(Collectors.toList()));
        // Create group by key generator
        GroupByKeyGenerator groupByKeyGenerator = null;
        if (groupBy) {
            groupByKeyGenerator = new GroupByKeyGenerator(groupByVariableList, processedMetaStreamEvent, SiddhiConstants.UNKNOWN_STATE, tableMap, processVariableExpressionExecutors, siddhiAppContext, aggregatorName);
        }
        // Create new scheduler
        EntryValveExecutor entryValveExecutor = new EntryValveExecutor(siddhiAppContext);
        LockWrapper lockWrapper = new LockWrapper(aggregatorName);
        lockWrapper.setLock(new ReentrantLock());
        Scheduler scheduler = SchedulerParser.parse(siddhiAppContext.getScheduledExecutorService(), entryValveExecutor, siddhiAppContext);
        scheduler.init(lockWrapper, aggregatorName);
        scheduler.setStreamEventPool(new StreamEventPool(processedMetaStreamEvent, 10));
        QueryParserHelper.reduceMetaComplexEvent(incomingMetaStreamEvent);
        QueryParserHelper.reduceMetaComplexEvent(processedMetaStreamEvent);
        QueryParserHelper.updateVariablePosition(incomingMetaStreamEvent, incomingVariableExpressionExecutors);
        QueryParserHelper.updateVariablePosition(processedMetaStreamEvent, processVariableExpressionExecutors);
        List<TimePeriod.Duration> incrementalDurations = getSortedPeriods(aggregationDefinition.getTimePeriod());
        Map<TimePeriod.Duration, Table> aggregationTables = initDefaultTables(aggregatorName, incrementalDurations, processedMetaStreamEvent.getOutputStreamDefinition(), siddhiAppRuntimeBuilder, aggregationDefinition.getAnnotations(), groupByVariableList);
        int bufferSize = 0;
        Element element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_BUFFER_SIZE, null, aggregationDefinition.getAnnotations());
        if (element != null) {
            try {
                bufferSize = Integer.parseInt(element.getValue());
            } catch (NumberFormatException e) {
                throw new SiddhiAppCreationException(e.getMessage() + ": BufferSize must be an integer");
            }
        }
        if (bufferSize > 0) {
            TimePeriod.Duration rootDuration = incrementalDurations.get(0);
            if (rootDuration == TimePeriod.Duration.MONTHS || rootDuration == TimePeriod.Duration.YEARS) {
                throw new SiddhiAppCreationException("A buffer size greater than 0 can be provided, only when the " + "first duration value is seconds, minutes, hours or days");
            }
            if (!isProcessingOnExternalTime) {
                // Buffer size is used to process out-of-order events. However, events can never be
                // out of order if they are processed based on event arrival time.
                throw new SiddhiAppCreationException("Buffer size cannot be specified when events are aggregated " + "based on event arrival time.");
            }
        } else if (bufferSize < 0) {
            throw new SiddhiAppCreationException("Expected a positive integer as the buffer size, but found " + bufferSize + " as the provided value");
        }
        boolean ignoreEventsOlderThanBuffer = false;
        element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_IGNORE_EVENTS_OLDER_THAN_BUFFER, null, aggregationDefinition.getAnnotations());
        if (element != null) {
            if (element.getValue().equalsIgnoreCase("true")) {
                ignoreEventsOlderThanBuffer = true;
            } else if (!element.getValue().equalsIgnoreCase("false")) {
                throw new SiddhiAppCreationException("IgnoreEventsOlderThanBuffer value must " + "be true or false");
            }
        }
        Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap = buildIncrementalExecutors(isProcessingOnExternalTime, processedMetaStreamEvent, processExpressionExecutors, groupByKeyGenerator, bufferSize, ignoreEventsOlderThanBuffer, incrementalDurations, aggregationTables, siddhiAppContext, aggregatorName);
        // Recreate in-memory data from tables
        RecreateInMemoryData recreateInMemoryData = new RecreateInMemoryData(incrementalDurations, aggregationTables, incrementalExecutorMap, siddhiAppContext, processedMetaStreamEvent, tableMap, windowMap, aggregationMap);
        IncrementalExecutor rootIncrementalExecutor = incrementalExecutorMap.get(incrementalDurations.get(0));
        rootIncrementalExecutor.setScheduler(scheduler);
        // Connect entry valve to root incremental executor
        entryValveExecutor.setNextExecutor(rootIncrementalExecutor);
        QueryParserHelper.initStreamRuntime(streamRuntime, incomingMetaStreamEvent, lockWrapper, aggregatorName);
        LatencyTracker latencyTrackerFind = null;
        LatencyTracker latencyTrackerInsert = null;
        ThroughputTracker throughputTrackerFind = null;
        ThroughputTracker throughputTrackerInsert = null;
        if (siddhiAppContext.getStatisticsManager() != null) {
            latencyTrackerFind = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_FIND);
            latencyTrackerInsert = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_INSERT);
            throughputTrackerFind = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_FIND);
            throughputTrackerInsert = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_INSERT);
        }
        streamRuntime.setCommonProcessor(new IncrementalAggregationProcessor(rootIncrementalExecutor, incomingExpressionExecutors, processedMetaStreamEvent, latencyTrackerInsert, throughputTrackerInsert, siddhiAppContext));
        List<ExpressionExecutor> baseExecutors = cloneExpressionExecutors(processExpressionExecutors);
        ExpressionExecutor timestampExecutor = baseExecutors.remove(0);
        return new AggregationRuntime(aggregationDefinition, incrementalExecutorMap, aggregationTables, ((SingleStreamRuntime) streamRuntime), entryValveExecutor, incrementalDurations, siddhiAppContext, baseExecutors, timestampExecutor, processedMetaStreamEvent, outputExpressionExecutors, latencyTrackerFind, throughputTrackerFind, recreateInMemoryData);
    } catch (Throwable t) {
        ExceptionUtil.populateQueryContext(t, aggregationDefinition, siddhiAppContext);
        throw t;
    }
}
Also used: Variable (org.wso2.siddhi.query.api.expression.Variable), OutputAttribute (org.wso2.siddhi.query.api.execution.query.selection.OutputAttribute), Attribute (org.wso2.siddhi.query.api.definition.Attribute), Scheduler (org.wso2.siddhi.core.util.Scheduler), GroupByKeyGenerator (org.wso2.siddhi.core.query.selector.GroupByKeyGenerator), Element (org.wso2.siddhi.query.api.annotation.Element), ArrayList (java.util.ArrayList), IncrementalExecutor (org.wso2.siddhi.core.aggregation.IncrementalExecutor), StreamEventPool (org.wso2.siddhi.core.event.stream.StreamEventPool), StreamRuntime (org.wso2.siddhi.core.query.input.stream.StreamRuntime), SingleStreamRuntime (org.wso2.siddhi.core.query.input.stream.single.SingleStreamRuntime), IncrementalAggregationProcessor (org.wso2.siddhi.core.aggregation.IncrementalAggregationProcessor), AggregationRuntime (org.wso2.siddhi.core.aggregation.AggregationRuntime), ReentrantLock (java.util.concurrent.locks.ReentrantLock), ThroughputTracker (org.wso2.siddhi.core.util.statistics.ThroughputTracker), Table (org.wso2.siddhi.core.table.Table), VariableExpressionExecutor (org.wso2.siddhi.core.executor.VariableExpressionExecutor), ExpressionExecutor (org.wso2.siddhi.core.executor.ExpressionExecutor), StreamDefinition (org.wso2.siddhi.query.api.definition.StreamDefinition), SiddhiAppCreationException (org.wso2.siddhi.core.exception.SiddhiAppCreationException), TimePeriod (org.wso2.siddhi.query.api.aggregation.TimePeriod), EntryValveExecutor (org.wso2.siddhi.core.query.input.stream.single.EntryValveExecutor), LockWrapper (org.wso2.siddhi.core.util.lock.LockWrapper), Expression (org.wso2.siddhi.query.api.expression.Expression), IncrementalAttributeAggregator (org.wso2.siddhi.core.query.selector.attribute.aggregator.incremental.IncrementalAttributeAggregator), RecreateInMemoryData (org.wso2.siddhi.core.aggregation.RecreateInMemoryData), LatencyTracker (org.wso2.siddhi.core.util.statistics.LatencyTracker), MetaStreamEvent (org.wso2.siddhi.core.event.stream.MetaStreamEvent)
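
One of the denser steps in parse() is the @BufferSize handling: read the annotation element, parse it as an integer, and reject values that conflict with the root duration or with arrival-time processing. The sketch below reproduces that validation in isolation; validateBufferSize and the local Duration enum are illustrative stand-ins, not Siddhi API.

import java.util.Arrays;
import java.util.List;

public final class BufferSizeValidationSketch {

    // Illustrative stand-in for org.wso2.siddhi.query.api.aggregation.TimePeriod.Duration.
    enum Duration { SECONDS, MINUTES, HOURS, DAYS, MONTHS, YEARS }

    // Mirrors the checks in parse(): the value must be an integer, negatives are rejected,
    // and a positive buffer requires a sub-month root duration plus external-time processing.
    static int validateBufferSize(String rawValue, List<Duration> sortedDurations,
                                  boolean isProcessingOnExternalTime) {
        int bufferSize;
        try {
            bufferSize = Integer.parseInt(rawValue);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("BufferSize must be an integer", e);
        }
        if (bufferSize < 0) {
            throw new IllegalArgumentException(
                    "Expected a positive integer as the buffer size, but found " + bufferSize);
        }
        if (bufferSize > 0) {
            Duration root = sortedDurations.get(0);
            if (root == Duration.MONTHS || root == Duration.YEARS) {
                throw new IllegalArgumentException("A buffer size greater than 0 can be provided "
                        + "only when the first duration is seconds, minutes, hours or days");
            }
            if (!isProcessingOnExternalTime) {
                // Buffering exists to reorder out-of-order events; arrival-time
                // processing can never be out of order, so the combination is invalid.
                throw new IllegalArgumentException("Buffer size cannot be specified when events "
                        + "are aggregated based on event arrival time");
            }
        }
        return bufferSize;
    }

    public static void main(String[] args) {
        System.out.println(validateBufferSize("3", Arrays.asList(Duration.SECONDS), true)); // 3
    }
}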

Example 3 with Names

use of org.wso2.ballerinalang.compiler.util.Names in project carbon-apimgt by wso2.

the class ApiDAOImpl method getAPIsByStatus.

/**
 * @see ApiDAO#getAPIsByStatus(Set, List, List)
 */
@Override
@SuppressFBWarnings("SQL_PREPARED_STATEMENT_GENERATED_FROM_NONCONSTANT_STRING")
public List<API> getAPIsByStatus(Set<String> roles, List<String> statuses, List<String> labels) throws APIMgtDAOException {
    // check for null at the beginning before constructing the query to retrieve APIs from database
    if (roles == null || statuses == null) {
        String errorMessage = "Role list or API status list should not be null to retrieve APIs.";
        log.error(errorMessage);
        throw new APIMgtDAOException(errorMessage);
    }
    // The query below retrieves the union of:
    // published/prototyped APIs (statuses) with public visibility, and
    // published/prototyped APIs with restricted visibility, where access is restricted based on the user's roles
    String labelQuery;
    if (labels.isEmpty()) {
        labelQuery = "SELECT LABEL_ID FROM AM_LABELS WHERE TYPE_NAME='STORE'";
    } else {
        labelQuery = "SELECT LABEL_ID FROM AM_LABELS WHERE NAME IN (" + DAOUtil.getParameterString(labels.size()) + ") AND TYPE_NAME='STORE'";
    }
    final String query = "SELECT UUID, PROVIDER, NAME, CONTEXT, VERSION, DESCRIPTION, CURRENT_LC_STATUS, "
            + "LIFECYCLE_INSTANCE_ID, LC_WORKFLOW_STATUS, SECURITY_SCHEME FROM ("
            + API_SUMMARY_SELECT + " WHERE VISIBILITY = '" + API.Visibility.PUBLIC + "' "
            + "AND CURRENT_LC_STATUS IN (" + DAOUtil.getParameterString(statuses.size()) + ") "
            + "AND API_TYPE_ID = (SELECT TYPE_ID FROM AM_API_TYPES WHERE TYPE_NAME = ?) "
            + "UNION "
            + API_SUMMARY_SELECT + " WHERE VISIBILITY = '" + API.Visibility.RESTRICTED + "' "
            + "AND UUID IN (SELECT API_ID FROM AM_API_VISIBLE_ROLES WHERE ROLE IN ("
            + DAOUtil.getParameterString(roles.size()) + ")) "
            + "AND CURRENT_LC_STATUS IN (" + DAOUtil.getParameterString(statuses.size()) + ") "
            + "AND API_TYPE_ID = (SELECT TYPE_ID FROM AM_API_TYPES WHERE TYPE_NAME = ?)) A "
            + "JOIN AM_API_LABEL_MAPPING LM ON A.UUID = LM.API_ID "
            + "WHERE LM.LABEL_ID IN (" + labelQuery + ")";
    try (Connection connection = DAOUtil.getConnection();
        PreparedStatement statement = connection.prepareStatement(query)) {
        int i = 0;
        // put desired API status into the query (to get APIs with public visibility)
        for (String status : statuses) {
            statement.setString(++i, status);
        }
        statement.setString(++i, ApiType.STANDARD.toString());
        // put desired roles into the query
        for (String role : roles) {
            statement.setString(++i, role);
        }
        // put desired API status into the query (to get APIs with restricted visibility)
        for (String status : statuses) {
            statement.setString(++i, status);
        }
        statement.setString(++i, ApiType.STANDARD.toString());
        // Set the label names in the query
        for (String label : labels) {
            statement.setString(++i, label);
        }
        return constructAPISummaryList(connection, statement);
    } catch (SQLException e) {
        String errorMessage = "Error while retrieving API list in store.";
        throw new APIMgtDAOException(errorMessage, e);
    }
}
Also used: APIMgtDAOException (org.wso2.carbon.apimgt.core.exception.APIMgtDAOException), SQLException (java.sql.SQLException), Connection (java.sql.Connection), PreparedStatement (java.sql.PreparedStatement), Endpoint (org.wso2.carbon.apimgt.core.models.Endpoint), SuppressFBWarnings (edu.umd.cs.findbugs.annotations.SuppressFBWarnings)
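
The query construction above hinges on two conventions: DAOUtil.getParameterString(n) expands to a comma-separated run of n JDBC placeholders, and a single incrementing index binds statuses, the API type, roles and labels in exactly the order their placeholder groups appear in the SQL. Here is a minimal sketch of the pattern; parameterString and the trimmed-down SQL are hypothetical, not the actual DAOUtil or schema.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;

public final class PlaceholderBindingSketch {

    // Hypothetical equivalent of DAOUtil.getParameterString(n): "?", "?,?", "?,?,?", ...
    static String parameterString(int n) {
        return String.join(",", Collections.nCopies(n, "?"));
    }

    // Binds each value list in the same order its placeholder group appears in the SQL.
    static PreparedStatement prepareStatusAndRoleQuery(Connection connection,
            List<String> statuses, List<String> roles) throws SQLException {
        String sql = "SELECT UUID FROM AM_API WHERE CURRENT_LC_STATUS IN ("
                + parameterString(statuses.size()) + ") AND UUID IN "
                + "(SELECT API_ID FROM AM_API_VISIBLE_ROLES WHERE ROLE IN ("
                + parameterString(roles.size()) + "))";
        PreparedStatement statement = connection.prepareStatement(sql);
        int i = 0;
        for (String status : statuses) {
            statement.setString(++i, status); // placeholders 1..statuses.size()
        }
        for (String role : roles) {
            statement.setString(++i, role); // placeholders following the statuses
        }
        return statement;
    }
}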

Example 4 with Names

use of org.wso2.ballerinalang.compiler.util.Names in project carbon-apimgt by wso2.

the class ApiDAOImplIT method testSearchAPIs.

@Test
public void testSearchAPIs() throws Exception {
    ApiDAO apiDAO = DAOFactory.getApiDAO();
    Set<String> userRoles = new HashSet<>(Arrays.asList(CUSTOMER_ROLE, MANAGER_ROLE, EMPLOYEE_ROLE));
    // Sample API names
    final String mixedCaseString = "Mixed Case";
    final String lowerCaseString = "lower case";
    final String upperCaseString = "UPPER CASE";
    final String charSymbolNumString = "mi ##symbol 12num";
    final String symbolSpaceString = "_under & Score_";
    // Search string cases
    final String commonMixedCaseSearchString = "CaSe";
    final String commonLowerCaseSearchString = "case";
    final String commonUpperCaseSearchString = "CASE";
    final String symbolSearchString = "##symbol";
    // In some databases numbers are not used in indexing
    final String numberSearchString = "12n";
    // API Provider, the person who owns the API
    final String provider = "John";
    // Create test data
    Map<String, API> apis = new HashMap<>();
    apis.put(mixedCaseString, SampleTestObjectCreator.createUniqueAPI().name(mixedCaseString).provider(provider).build());
    apis.put(lowerCaseString, SampleTestObjectCreator.createUniqueAPI().name(lowerCaseString).provider(provider).build());
    apis.put(upperCaseString, SampleTestObjectCreator.createUniqueAPI().name(upperCaseString).provider(provider).build());
    apis.put(charSymbolNumString, SampleTestObjectCreator.createUniqueAPI().name(charSymbolNumString).provider(provider).build());
    apis.put(symbolSpaceString, SampleTestObjectCreator.createUniqueAPI().name(symbolSpaceString).provider(provider).build());
    // Add APIs
    testAddGetEndpoint();
    for (Map.Entry<String, API> entry : apis.entrySet()) {
        API api = entry.getValue();
        apiDAO.addAPI(api);
        // Replace with summary object for validation
        apis.put(entry.getKey(), SampleTestObjectCreator.getSummaryFromAPI(api));
    }
    // Sleep for indexing
    Thread.sleep(5000);
    // Expected result for the common search string
    List<API> commonStringResult = new ArrayList<>();
    commonStringResult.add(apis.get(mixedCaseString));
    commonStringResult.add(apis.get(lowerCaseString));
    commonStringResult.add(apis.get(upperCaseString));
    // Search by common mixed case
    List<API> apiList = apiDAO.searchAPIs(new HashSet<>(), provider, commonMixedCaseSearchString, 0, 10);
    Assert.assertEquals(apiList.size(), 3);
    Assert.assertTrue(APIUtils.isListsEqualIgnoreOrder(apiList, commonStringResult, new APIComparator()), TestUtil.printListDiff(apiList, commonStringResult));
    // Search by common lower case
    apiList = apiDAO.searchAPIs(userRoles, provider, commonLowerCaseSearchString, 0, 10);
    Assert.assertEquals(apiList.size(), 3);
    Assert.assertTrue(APIUtils.isListsEqualIgnoreOrder(apiList, commonStringResult, new APIComparator()), TestUtil.printListDiff(apiList, commonStringResult));
    // Search by common upper case
    apiList = apiDAO.searchAPIs(userRoles, provider, commonUpperCaseSearchString, 0, 10);
    Assert.assertEquals(apiList.size(), 3);
    Assert.assertTrue(APIUtils.isListsEqualIgnoreOrder(apiList, commonStringResult, new APIComparator()), TestUtil.printListDiff(apiList, commonStringResult));
    // Search by symbol
    apiList = apiDAO.searchAPIs(userRoles, provider, symbolSearchString, 0, 10);
    Assert.assertEquals(apiList.size(), 1);
    API actualAPI = apiList.get(0);
    API expectedAPI = apis.get(charSymbolNumString);
    Assert.assertEquals(actualAPI, expectedAPI, TestUtil.printDiff(actualAPI, expectedAPI));
    // Search by number
    apiList = apiDAO.searchAPIs(userRoles, provider, numberSearchString, 0, 10);
    Assert.assertEquals(apiList.size(), 1);
    actualAPI = apiList.get(0);
    expectedAPI = apis.get(charSymbolNumString);
    Assert.assertEquals(actualAPI, expectedAPI, TestUtil.printDiff(actualAPI, expectedAPI));
}
Also used: HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), APIComparator (org.wso2.carbon.apimgt.core.util.APIComparator), CompositeAPI (org.wso2.carbon.apimgt.core.models.CompositeAPI), API (org.wso2.carbon.apimgt.core.models.API), Map (java.util.Map), ApiDAO (org.wso2.carbon.apimgt.core.dao.ApiDAO), HashSet (java.util.HashSet), Test (org.testng.annotations.Test)
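
The assertions above rely on APIUtils.isListsEqualIgnoreOrder, which treats two lists as equal when they contain the same elements under a comparator, regardless of order. One plausible way to implement such a check, assuming both lists can be copied and sorted; this is a sketch, not the actual APIUtils code.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public final class IgnoreOrderComparisonSketch {

    // True when both lists hold the same elements per the comparator, ignoring order.
    // Sorting copies keeps the input lists untouched.
    static <T> boolean isListsEqualIgnoreOrder(List<T> first, List<T> second,
                                               Comparator<? super T> comparator) {
        if (first.size() != second.size()) {
            return false;
        }
        List<T> a = new ArrayList<>(first);
        List<T> b = new ArrayList<>(second);
        a.sort(comparator);
        b.sort(comparator);
        for (int i = 0; i < a.size(); i++) {
            if (comparator.compare(a.get(i), b.get(i)) != 0) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        List<String> x = Arrays.asList("lower case", "UPPER CASE", "Mixed Case");
        List<String> y = Arrays.asList("Mixed Case", "lower case", "UPPER CASE");
        System.out.println(isListsEqualIgnoreOrder(x, y, String::compareTo)); // true
    }
}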

Example 5 with Names

use of org.wso2.ballerinalang.compiler.util.Names in project carbon-apimgt by wso2.

the class APIPublisherImpl method replaceGroupNamesWithId.

/**
 * Replaces each groupId field's value (a role name supplied by the user) with the corresponding role ID.
 *
 * @param permissionString the permission JSON string whose groupId fields contain role names
 * @return the permission string with groupId values replaced by role IDs
 * @throws ParseException         if there is an error parsing the JSON string
 * @throws APIManagementException if there is an error getting the IdentityProvider instance
 */
private String replaceGroupNamesWithId(String permissionString) throws ParseException, APIManagementException {
    JSONArray updatedPermissionArray = new JSONArray();
    JSONParser jsonParser = new JSONParser();
    JSONArray originalPermissionArray = (JSONArray) jsonParser.parse(permissionString);
    try {
        for (Object permissionObj : originalPermissionArray) {
            JSONObject jsonObject = (JSONObject) permissionObj;
            String groupName = (String) jsonObject.get(APIMgtConstants.Permission.GROUP_ID);
            String groupId = getIdentityProvider().getRoleId(groupName);
            JSONObject updatedPermissionJsonObj = new JSONObject();
            updatedPermissionJsonObj.put(APIMgtConstants.Permission.GROUP_ID, groupId);
            updatedPermissionJsonObj.put(APIMgtConstants.Permission.PERMISSION, jsonObject.get(APIMgtConstants.Permission.PERMISSION));
            updatedPermissionArray.add(updatedPermissionJsonObj);
        }
    } catch (IdentityProviderException e) {
        String errorMessage = "There are invalid roles in the permission string";
        log.error(errorMessage, e);
        throw new APIManagementException(errorMessage, e, ExceptionCodes.UNSUPPORTED_ROLE);
    }
    return updatedPermissionArray.toJSONString();
}
Also used: JSONObject (org.json.simple.JSONObject), APIManagementException (org.wso2.carbon.apimgt.core.exception.APIManagementException), JSONArray (org.json.simple.JSONArray), JSONParser (org.json.simple.parser.JSONParser), IdentityProviderException (org.wso2.carbon.apimgt.core.exception.IdentityProviderException)
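
The rewrite above maps each entry's groupId from a role name to a role ID while passing the permission list through unchanged. The sketch below runs the same shape of transformation end to end, with a hard-coded Map standing in for getIdentityProvider().getRoleId(); the resolver and sample values are assumptions for the demo.

import java.util.HashMap;
import java.util.Map;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;

public final class GroupIdRewriteSketch {

    @SuppressWarnings("unchecked")
    public static void main(String[] args) throws ParseException {
        // Hard-coded stand-in for the identity provider's role-name-to-role-ID lookup.
        Map<String, String> roleIds = new HashMap<>();
        roleIds.put("admin", "8d8b2e3f-0001");

        String permissionString = "[{\"groupId\":\"admin\",\"permission\":[\"READ\",\"UPDATE\"]}]";
        JSONArray original = (JSONArray) new JSONParser().parse(permissionString);
        JSONArray updated = new JSONArray();
        for (Object permissionObj : original) {
            JSONObject entry = (JSONObject) permissionObj;
            JSONObject rewritten = new JSONObject();
            // Swap the role name for its ID; copy the permission list through unchanged.
            rewritten.put("groupId", roleIds.get((String) entry.get("groupId")));
            rewritten.put("permission", entry.get("permission"));
            updated.add(rewritten);
        }
        // Key order may vary: e.g. [{"groupId":"8d8b2e3f-0001","permission":["READ","UPDATE"]}]
        System.out.println(updated.toJSONString());
    }
}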

Aggregations

ArrayList (java.util.ArrayList): 14
Query (javax.persistence.Query): 8
TaskStatus (org.wso2.carbon.humantask.core.dao.TaskStatus): 8
HashMap (java.util.HashMap): 7
Test (org.testng.annotations.Test): 6
Name (org.wso2.ballerinalang.compiler.util.Name): 5
IOException (java.io.IOException): 4
Map (java.util.Map): 4
BLangVariable (org.wso2.ballerinalang.compiler.tree.BLangVariable): 4
BLangXMLQName (org.wso2.ballerinalang.compiler.tree.expressions.BLangXMLQName): 4
CompilerContext (org.wso2.ballerinalang.compiler.util.CompilerContext): 4
APIManagementException (org.wso2.carbon.apimgt.api.APIManagementException): 4
Iterator (java.util.Iterator): 3
OMElement (org.apache.axiom.om.OMElement): 3
HTTPTestRequest (org.ballerinalang.test.services.testutils.HTTPTestRequest): 3
BLangPackage (org.wso2.ballerinalang.compiler.tree.BLangPackage): 3
BLangRecordLiteral (org.wso2.ballerinalang.compiler.tree.expressions.BLangRecordLiteral): 3
BLangAssignment (org.wso2.ballerinalang.compiler.tree.statements.BLangAssignment): 3
Tier (org.wso2.carbon.apimgt.api.model.Tier): 3
JsonObject (com.google.gson.JsonObject): 2