
Example 1 with ArrayListValuedHashMap

Use of org.apache.commons.collections4.multimap.ArrayListValuedHashMap in project herd by FINRAOS.

From class StorageFileDaoImpl, method getStorageFilePathsByStorageUnitIds:

@Override
public MultiValuedMap<Integer, String> getStorageFilePathsByStorageUnitIds(List<Integer> storageUnitIds) {
    // Create a map that can hold a collection of values against each key.
    MultiValuedMap<Integer, String> result = new ArrayListValuedHashMap<>();
    // Retrieve the pagination size for the storage file paths query configured in the system.
    Integer paginationSize = configurationHelper.getProperty(ConfigurationValue.STORAGE_FILE_PATHS_QUERY_PAGINATION_SIZE, Integer.class);
    // Create the criteria builder and the criteria.
    CriteriaBuilder builder = entityManager.getCriteriaBuilder();
    CriteriaQuery<Tuple> criteria = builder.createTupleQuery();
    // The criteria root is the storage file.
    Root<StorageFileEntity> storageFileEntity = criteria.from(StorageFileEntity.class);
    // Get the columns.
    Path<Integer> storageUnitIdColumn = storageFileEntity.get(StorageFileEntity_.storageUnitId);
    Path<String> storageFilePathColumn = storageFileEntity.get(StorageFileEntity_.path);
    // Create the standard restrictions (i.e. the standard where clauses).
    Predicate queryRestriction = getPredicateForInClause(builder, storageUnitIdColumn, storageUnitIds);
    // Add the select clause.
    criteria.multiselect(storageUnitIdColumn, storageFilePathColumn);
    // Add the where clause.
    criteria.where(queryRestriction);
    // Execute the query using pagination and populate the result map.
    int startPosition = 0;
    while (true) {
        // Run the query to get a list of tuples back.
        List<Tuple> tuples = entityManager.createQuery(criteria).setFirstResult(startPosition).setMaxResults(paginationSize).getResultList();
        // Populate the result map from the returned tuples (i.e. 1 tuple for each row).
        for (Tuple tuple : tuples) {
            // Extract the tuple values.
            Integer storageUnitId = tuple.get(storageUnitIdColumn);
            String storageFilePath = tuple.get(storageFilePathColumn);
            // Update the result map.
            result.put(storageUnitId, storageFilePath);
        }
        // Break out of the while loop if we got fewer results than the pagination size.
        if (tuples.size() < paginationSize) {
            break;
        }
        // Increment the start position.
        startPosition += paginationSize;
    }
    return result;
}
Also used: CriteriaBuilder (javax.persistence.criteria.CriteriaBuilder), Predicate (javax.persistence.criteria.Predicate), StorageFileEntity (org.finra.herd.model.jpa.StorageFileEntity), ArrayListValuedHashMap (org.apache.commons.collections4.multimap.ArrayListValuedHashMap), Tuple (javax.persistence.Tuple)
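
The pattern worth noting here is paging through a JPA result set while accumulating rows into a multimap, so several file paths can collect under one storage unit ID. Below is a minimal, self-contained sketch of that loop; the fetchPage method and the hard-coded "unitId:path" rows are stand-ins for the criteria query and the database, not part of herd.

import java.util.List;
import org.apache.commons.collections4.MultiValuedMap;
import org.apache.commons.collections4.multimap.ArrayListValuedHashMap;

public class PaginatedMultimapDemo {

    // Stand-in for the JPA query above: pages over fixed "unitId:path" rows.
    private static final List<String> ROWS = List.of(
            "1:/data/a.txt", "1:/data/b.txt", "2:/data/c.txt", "2:/data/d.txt", "2:/data/e.txt");

    private static List<String> fetchPage(int startPosition, int pageSize) {
        int end = Math.min(startPosition + pageSize, ROWS.size());
        return startPosition >= end ? List.of() : ROWS.subList(startPosition, end);
    }

    public static void main(String[] args) {
        MultiValuedMap<Integer, String> result = new ArrayListValuedHashMap<>();
        int pageSize = 2;
        int startPosition = 0;
        while (true) {
            List<String> page = fetchPage(startPosition, pageSize);
            for (String row : page) {
                String[] parts = row.split(":", 2);
                // put() never overwrites: each path is appended to the key's backing list.
                result.put(Integer.valueOf(parts[0]), parts[1]);
            }
            if (page.size() < pageSize) {
                break; // a short (or empty) page means the last page was reached
            }
            startPosition += pageSize;
        }
        System.out.println(result.get(2)); // [/data/c.txt, /data/d.txt, /data/e.txt]
    }
}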

Example 2 with ArrayListValuedHashMap

Use of org.apache.commons.collections4.multimap.ArrayListValuedHashMap in project dhis2-core by dhis2.

From class DataHandler, method getAggregatedDataValueMap:

/**
 * Returns a mapping between dimension items and values for the given data
 * query and list of indicators. The dimensional items that make up the
 * indicator numerators and denominators are used as the dimensional items
 * for the aggregated values being retrieved. In case of circular references
 * between indicators, an exception is thrown.
 *
 * @param params the {@link DataQueryParams}.
 * @param items the list of {@link DimensionalItemObject}.
 * @return a dimensional items to aggregate values map.
 */
private MultiValuedMap<String, DimensionItemObjectValue> getAggregatedDataValueMap(DataQueryParams params, List<DimensionalItemObject> items) {
    if (items.isEmpty()) {
        return new ArrayListValuedHashMap<>();
    }
    DimensionalObject dimension = new BaseDimensionalObject(DATA_X_DIM_ID, DATA_X, null, DISPLAY_NAME_DATA_X, items);
    DataQueryParams dataSourceParams = newBuilder(params)
        .replaceDimension(dimension)
        .withMeasureCriteria(new HashMap<>())
        .withIncludeNumDen(false)
        .withSkipHeaders(true)
        .withOutputFormat(ANALYTICS)
        .withSkipMeta(true)
        .build();
    Grid grid = dataAggregator.getAggregatedDataValueGrid(dataSourceParams);
    MultiValuedMap<String, DimensionItemObjectValue> result = new ArrayListValuedHashMap<>();
    if (isEmpty(grid.getRows())) {
        return result;
    }
    // Derive the Grid indexes for data, value and period based on the first
    // row of the Grid
    final int dataIndex = getGridIndexByDimensionItem(grid.getRow(0), items, 0);
    final int periodIndex = getGridIndexByDimensionItem(grid.getRow(0), params.getPeriods(), 1);
    final int valueIndex = grid.getWidth() - 1;
    final List<DimensionalItemObject> basePeriods = params.getPeriods();
    for (List<Object> row : grid.getRows()) {
        for (DimensionalItemObject dimensionalItem : findDimensionalItems((String) row.get(dataIndex), items)) {
            if (hasPeriod(row, periodIndex)) {
                addItemBasedOnPeriodOffset(result, periodIndex, valueIndex, row, dimensionalItem, basePeriods);
            } else {
                result.put(
                    join(remove(row.toArray(new Object[0]), valueIndex), DIMENSION_SEP),
                    new DimensionItemObjectValue(dimensionalItem, ((Number) row.get(valueIndex)).doubleValue()));
            }
        }
    }
    return result;
}
Also used: HashMap (java.util.HashMap), ArrayListValuedHashMap (org.apache.commons.collections4.multimap.ArrayListValuedHashMap), BaseDimensionalObject (org.hisp.dhis.common.BaseDimensionalObject), Grid (org.hisp.dhis.common.Grid), DimensionItemObjectValue (org.hisp.dhis.common.DimensionItemObjectValue), PeriodType.getPeriodTypeFromIsoString (org.hisp.dhis.period.PeriodType.getPeriodTypeFromIsoString), DimensionalObject (org.hisp.dhis.common.DimensionalObject), DataQueryParams (org.hisp.dhis.analytics.DataQueryParams), EventQueryParams.fromDataQueryParams (org.hisp.dhis.analytics.event.EventQueryParams.fromDataQueryParams), DimensionalItemObject (org.hisp.dhis.common.DimensionalItemObject), AnalyticsUtils.getRoundedValueObject (org.hisp.dhis.analytics.util.AnalyticsUtils.getRoundedValueObject)
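
In the else branch, the composite map key is built by dropping the value column from the row and joining the remaining cells. Assuming join and remove are the usual commons-lang3 statics (StringUtils.join and ArrayUtils.remove) and that DIMENSION_SEP is a dash separator (an assumption; neither is confirmed by this snippet), the key construction reduces to:

import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;

public class CompositeKeyDemo {
    public static void main(String[] args) {
        // One grid row: dimension item, org unit, period, and the value in the last column.
        Object[] row = { "deA", "ouA", "2000Q1", 42.0 };
        int valueIndex = row.length - 1;
        // Drop the value column, then join the remaining cells into the multimap key.
        String key = StringUtils.join(ArrayUtils.remove(row, valueIndex), "-"); // "-" stands in for DIMENSION_SEP
        System.out.println(key); // deA-ouA-2000Q1
    }
}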

Example 3 with ArrayListValuedHashMap

Use of org.apache.commons.collections4.multimap.ArrayListValuedHashMap in project dhis2-core by dhis2.

From class QueryPlannerTest, method testGetPermutationDimensionalItemValueMapCocEnabled:

@Test
void testGetPermutationDimensionalItemValueMapCocEnabled() {
    MultiValuedMap<String, DimensionItemObjectValue> aggregatedDataMap = new ArrayListValuedHashMap<>();
    aggregatedDataMap.put(makeKey(deA, coc, ouA, "2000Q1"), new DimensionItemObjectValue(deA, 1d));
    aggregatedDataMap.put(makeKey(deA, coc, ouA, "2000Q2"), new DimensionItemObjectValue(deA, 2d));
    aggregatedDataMap.put(makeKey(deA, coc, ouB, "2000Q1"), new DimensionItemObjectValue(deA, 3d));
    aggregatedDataMap.put(makeKey(deA, coc, ouB, "2000Q2"), new DimensionItemObjectValue(deA, 4d));
    aggregatedDataMap.put(makeKey(deB, coc, ouA, "2000Q1"), new DimensionItemObjectValue(deB, 5d));
    aggregatedDataMap.put(makeKey(deB, coc, ouA, "2000Q2"), new DimensionItemObjectValue(deB, 6d));
    aggregatedDataMap.put(makeKey(deB, coc, ouB, "2000Q1"), new DimensionItemObjectValue(deB, 7d));
    aggregatedDataMap.put(makeKey(deB, coc, ouB, "2000Q2"), new DimensionItemObjectValue(deB, 8d));
    // Method under test //
    Map<String, List<DimensionItemObjectValue>> permutationMap = DataQueryParams.getPermutationDimensionalItemValueMap(aggregatedDataMap);
    assertNotNull(permutationMap);
    String ouAQ1Key = ouA.getUid() + DIMENSION_SEP + "2000Q1";
    String ouAQ2Key = ouA.getUid() + DIMENSION_SEP + "2000Q2";
    String ouBQ1Key = ouB.getUid() + DIMENSION_SEP + "2000Q1";
    String ouBQ2Key = ouB.getUid() + DIMENSION_SEP + "2000Q2";
    List<DimensionItemObjectValue> ouAQ1 = permutationMap.get(ouAQ1Key);
    List<DimensionItemObjectValue> ouAQ2 = permutationMap.get(ouAQ2Key);
    List<DimensionItemObjectValue> ouBQ1 = permutationMap.get(ouBQ1Key);
    List<DimensionItemObjectValue> ouBQ2 = permutationMap.get(ouBQ2Key);
    assertEquals(2, ouAQ1.size());
    assertEquals(2, ouAQ2.size());
    assertEquals(2, ouBQ1.size());
    assertEquals(2, ouBQ2.size());
    List<DimensionItemObjectValue> ouAQ1Expected = new ArrayList<>();
    ouAQ1Expected.add(new DimensionItemObjectValue(deA, 1d));
    ouAQ1Expected.add(new DimensionItemObjectValue(deB, 5d));
    List<DimensionItemObjectValue> ouAQ2Expected = new ArrayList<>();
    ouAQ2Expected.add(new DimensionItemObjectValue(deA, 2d));
    ouAQ2Expected.add(new DimensionItemObjectValue(deB, 6d));
    List<DimensionItemObjectValue> ouBQ1Expected = new ArrayList<>();
    ouBQ1Expected.add(new DimensionItemObjectValue(deA, 3d));
    ouBQ1Expected.add(new DimensionItemObjectValue(deB, 7d));
    List<DimensionItemObjectValue> ouBQ2Expected = new ArrayList<>();
    ouBQ2Expected.add(new DimensionItemObjectValue(deA, 4d));
    ouBQ2Expected.add(new DimensionItemObjectValue(deB, 8d));
    assertCollectionsMatch(ouAQ1Expected, ouAQ1);
    assertCollectionsMatch(ouAQ2Expected, ouAQ2);
    assertCollectionsMatch(ouBQ1Expected, ouBQ1);
    assertCollectionsMatch(ouBQ2Expected, ouBQ2);
}
Also used: ArrayList (java.util.ArrayList), DimensionItemObjectValue (org.hisp.dhis.common.DimensionItemObjectValue), List (java.util.List), DimensionalObjectUtils.getList (org.hisp.dhis.common.DimensionalObjectUtils.getList), ArrayListValuedHashMap (org.apache.commons.collections4.multimap.ArrayListValuedHashMap), Test (org.junit.jupiter.api.Test), DhisSpringTest (org.hisp.dhis.DhisSpringTest)
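
The method under test flattens the multimap into a plain Map<String, List<DimensionItemObjectValue>> keyed by the organisation unit and period permutation. The production implementation is not shown here, but one way to get that shape out of an ArrayListValuedHashMap is the asMap() view copied into lists, sketched below with plain Double values:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.collections4.MultiValuedMap;
import org.apache.commons.collections4.multimap.ArrayListValuedHashMap;

public class PermutationMapDemo {
    public static void main(String[] args) {
        MultiValuedMap<String, Double> aggregated = new ArrayListValuedHashMap<>();
        aggregated.put("ouA-2000Q1", 1d);
        aggregated.put("ouA-2000Q1", 5d);
        aggregated.put("ouA-2000Q2", 2d);

        // asMap() exposes the multimap as Map<K, Collection<V>>; copying each
        // collection into an ArrayList yields the Map<String, List<V>> shape.
        Map<String, List<Double>> permutationMap = aggregated.asMap().entrySet().stream()
                .collect(Collectors.toMap(Map.Entry::getKey, e -> new ArrayList<>(e.getValue())));

        System.out.println(permutationMap.get("ouA-2000Q1")); // [1.0, 5.0]
    }
}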

Example 4 with ArrayListValuedHashMap

Use of org.apache.commons.collections4.multimap.ArrayListValuedHashMap in project midpoint by Evolveum.

From class IndexedRelationDefinitions, method initializeRelationDefinitionsByRelationName:

/**
 * Removes duplicate definitions as well.
 */
@NotNull
private Map<QName, RelationDefinitionType> initializeRelationDefinitionsByRelationName(List<RelationDefinitionType> definitions) {
    Map<QName, RelationDefinitionType> map = new HashMap<>();
    ListValuedMap<String, QName> expansions = new ArrayListValuedHashMap<>();
    for (Iterator<RelationDefinitionType> iterator = definitions.iterator(); iterator.hasNext(); ) {
        RelationDefinitionType definition = iterator.next();
        if (map.containsKey(definition.getRef())) {
            LOGGER.error("Duplicate relation definition for '{}'; ignoring: {}", definition.getRef(), definition);
            iterator.remove();
        } else {
            map.put(definition.getRef(), definition);
            expansions.put(definition.getRef().getLocalPart(), definition.getRef());
        }
    }
    // add entries for unqualified versions of the relation names
    for (String unqualified : expansions.keySet()) {
        List<QName> names = expansions.get(unqualified);
        if (names.contains(new QName(unqualified))) {
            // cannot expand unqualified if the expanded value is also unqualified
            continue;
        }
        assert !names.isEmpty();
        assert names.stream().allMatch(QNameUtil::isQualified);
        @NotNull QName chosenExpansion;
        if (names.size() == 1) {
            chosenExpansion = names.get(0);
        } else {
            QName nameInOrgNamespace = names.stream().filter(n -> SchemaConstants.NS_ORG.equals(n.getNamespaceURI())).findFirst().orElse(null);
            if (nameInOrgNamespace != null) {
                // org:xxx expansion will be the default one
                chosenExpansion = nameInOrgNamespace;
            } else {
                chosenExpansion = names.get(0);
                LOGGER.warn("Multiple resolutions of unqualified relation name '{}' ({}); " + "using the first one as default: '{}'. Please reconsider this as it could lead to " + "unpredictable behavior.", unqualified, names, chosenExpansion);
            }
        }
        assert QNameUtil.isQualified(chosenExpansion);
        map.put(new QName(unqualified), map.get(chosenExpansion));
    }
    return map;
}
Also used: RelationDefinitionType (com.evolveum.midpoint.xml.ns._public.common.common_3.RelationDefinitionType), HashSetValuedHashMap (org.apache.commons.collections4.multimap.HashSetValuedHashMap), ArrayListValuedHashMap (org.apache.commons.collections4.multimap.ArrayListValuedHashMap), QName (javax.xml.namespace.QName), QNameUtil (com.evolveum.midpoint.util.QNameUtil), NotNull (org.jetbrains.annotations.NotNull)
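
The expansions multimap is what makes ambiguity detectable: every qualified name is filed under its bare local part, so any local part whose value list has more than one entry has competing expansions. A small sketch of that idea (the example.org namespaces are placeholders, not midPoint's):

import java.util.List;
import javax.xml.namespace.QName;
import org.apache.commons.collections4.ListValuedMap;
import org.apache.commons.collections4.multimap.ArrayListValuedHashMap;

public class ExpansionDemo {
    public static void main(String[] args) {
        ListValuedMap<String, QName> expansions = new ArrayListValuedHashMap<>();
        expansions.put("manager", new QName("http://example.org/org", "manager"));
        expansions.put("manager", new QName("http://example.org/custom", "manager"));
        expansions.put("owner", new QName("http://example.org/org", "owner"));

        for (String localPart : expansions.keySet()) {
            // ListValuedMap.get() returns a List, so insertion order is preserved.
            List<QName> names = expansions.get(localPart);
            if (names.size() > 1) {
                System.out.println("ambiguous local part '" + localPart + "': " + names);
            }
        }
    }
}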

Example 5 with ArrayListValuedHashMap

Use of org.apache.commons.collections4.multimap.ArrayListValuedHashMap in project hive by apache.

From class ParallelEdgeFixer, method fixParallelEdges:

private void fixParallelEdges(OperatorGraph og) throws SemanticException {
    // Identify edge operators
    ListValuedMap<Pair<Cluster, Cluster>, Pair<Operator<?>, Operator<?>>> edgeOperators = new ArrayListValuedHashMap<>();
    for (Cluster c : og.getClusters()) {
        for (Operator<?> o : c.getMembers()) {
            for (Operator<? extends OperatorDesc> p : o.getParentOperators()) {
                Cluster parentCluster = og.clusterOf(p);
                if (parentCluster == c) {
                    continue;
                }
                edgeOperators.put(new Pair<>(parentCluster, c), new Pair<>(p, o));
            }
        }
    }
    // process all edges and fix parallel edges if there are any
    for (Pair<Cluster, Cluster> key : edgeOperators.keySet()) {
        List<Pair<Operator<?>, Operator<?>>> values = edgeOperators.get(key);
        if (values.size() <= 1) {
            continue;
        }
        // The operator pairs must be in a stable order - otherwise we end up with unstable plans, causing flaky tests.
        values.sort(new OperatorPairComparator());
        // Remove one edge (an unsupported one, if present) from the list; that edge is kept as is.
        removeOneEdge(values);
        Iterator<Pair<Operator<?>, Operator<?>>> it = values.iterator();
        while (it.hasNext()) {
            Pair<Operator<?>, Operator<?>> pair = it.next();
            fixParallelEdge(pair.left, pair.right);
        }
    }
}
Also used: ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator), MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator), TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator), Operator (org.apache.hadoop.hive.ql.exec.Operator), Cluster (org.apache.hadoop.hive.ql.optimizer.graph.OperatorGraph.Cluster), ArrayListValuedHashMap (org.apache.commons.collections4.multimap.ArrayListValuedHashMap), Pair (org.apache.calcite.util.Pair)
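
The same group-then-inspect shape drives the whole fix: bucket every inter-cluster edge under its (source, target) cluster pair, sort each bucket into a deterministic order, and treat any bucket holding more than one pair as a set of parallel edges. A toy sketch using commons-lang3's Pair and strings in place of Hive's clusters and operators:

import java.util.List;
import org.apache.commons.collections4.ListValuedMap;
import org.apache.commons.collections4.multimap.ArrayListValuedHashMap;
import org.apache.commons.lang3.tuple.Pair;

public class ParallelEdgeDemo {
    public static void main(String[] args) {
        // Edges keyed by (sourceCluster, targetCluster).
        ListValuedMap<Pair<String, String>, String> edges = new ArrayListValuedHashMap<>();
        edges.put(Pair.of("Map 1", "Reducer 2"), "RS_1");
        edges.put(Pair.of("Map 1", "Reducer 2"), "RS_2"); // second edge between the same clusters
        edges.put(Pair.of("Map 3", "Reducer 2"), "RS_3");

        for (Pair<String, String> key : edges.keySet()) {
            List<String> values = edges.get(key);
            // Sort for a stable order, mirroring the OperatorPairComparator step above.
            values.sort(String::compareTo);
            if (values.size() > 1) {
                System.out.println("parallel edges between " + key + ": " + values);
            }
        }
    }
}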

Aggregations

ArrayListValuedHashMap (org.apache.commons.collections4.multimap.ArrayListValuedHashMap) 10
DimensionItemObjectValue (org.hisp.dhis.common.DimensionItemObjectValue) 4
ArrayList (java.util.ArrayList) 3
List (java.util.List) 3
DhisSpringTest (org.hisp.dhis.DhisSpringTest) 3
DimensionalObjectUtils.getList (org.hisp.dhis.common.DimensionalObjectUtils.getList) 3
Test (org.junit.jupiter.api.Test) 3
Flow (org.openkilda.model.Flow) 3
SwitchId (org.openkilda.model.SwitchId) 3
YFlow (org.openkilda.model.YFlow) 3
SwitchFlowEntriesBuilder (org.openkilda.wfm.topology.flowhs.fsm.validation.SwitchFlowEntriesBuilder) 3
FlowPath (org.openkilda.model.FlowPath) 2
QNameUtil (com.evolveum.midpoint.util.QNameUtil) 1
RelationDefinitionType (com.evolveum.midpoint.xml.ns._public.common.common_3.RelationDefinitionType) 1
HashMap (java.util.HashMap) 1
Tuple (javax.persistence.Tuple) 1
CriteriaBuilder (javax.persistence.criteria.CriteriaBuilder) 1
Predicate (javax.persistence.criteria.Predicate) 1
QName (javax.xml.namespace.QName) 1
Pair (org.apache.calcite.util.Pair) 1