
Example 6 with Builder

use of com.google.common.collect.ImmutableList.Builder in project GeoGig by boundlessgeo.

the class HttpUtils method getParents.

/**
 * Gets the parents of the specified commit from the remote repository.
 *
 * @param repositoryURL the URL of the repository
 * @param commit the id of the commit whose parents to retrieve
 * @return a list of parent ids for the commit
 */
public static ImmutableList<ObjectId> getParents(URL repositoryURL, ObjectId commit) {
    HttpURLConnection connection = null;
    Builder<ObjectId> listBuilder = new ImmutableList.Builder<ObjectId>();
    try {
        String expanded = repositoryURL.toString() + "/repo/getparents?commitId=" + commit.toString();
        connection = connect(expanded);
        // Get Response
        InputStream is = HttpUtils.getResponseStream(connection);
        try {
            BufferedReader rd = new BufferedReader(new InputStreamReader(is));
            String line = rd.readLine();
            while (line != null) {
                listBuilder.add(ObjectId.valueOf(line));
                line = rd.readLine();
            }
        } finally {
            consumeAndCloseStream(is);
        }
    } catch (Exception e) {
        Throwables.propagate(e);
    } finally {
        consumeErrStreamAndCloseConnection(connection);
    }
    return listBuilder.build();
}
Also used : HttpURLConnection(java.net.HttpURLConnection) InputStreamReader(java.io.InputStreamReader) ObjectId(org.locationtech.geogig.api.ObjectId) GZIPInputStream(java.util.zip.GZIPInputStream) FilterInputStream(java.io.FilterInputStream) CountingInputStream(com.google.common.io.CountingInputStream) InputStream(java.io.InputStream) Builder(com.google.common.collect.ImmutableList.Builder) BufferedReader(java.io.BufferedReader) XMLStreamException(javax.xml.stream.XMLStreamException) IOException(java.io.IOException)
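
The read-lines-into-a-Builder loop in getParents can be shown in isolation. Below is a minimal, self-contained sketch of that pattern (plain JDK I/O plus Guava); the class and method names are illustrative, not the GeoGig implementation:

import com.google.common.collect.ImmutableList;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;

public final class LineListSketch {

    /**
     * Collects every line of the reader into an ImmutableList via a Builder,
     * mirroring the loop in getParents above. Sketch only, not GeoGig code.
     */
    public static ImmutableList<String> readLines(Reader in) throws IOException {
        ImmutableList.Builder<String> builder = ImmutableList.builder();
        try (BufferedReader reader = new BufferedReader(in)) {
            String line;
            while ((line = reader.readLine()) != null) {
                builder.add(line);
            }
        }
        return builder.build();
    }
}

The sketch uses try-with-resources for brevity; the original keeps the stream open inside the loop and delegates cleanup to consumeAndCloseStream and consumeErrStreamAndCloseConnection.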

Example 7 with Builder

use of com.google.common.collect.ImmutableList.Builder in project GeoGig by boundlessgeo.

the class HttpUtils method getAffectedFeatures.

/**
 * Retrieves a list of features that were modified or deleted by a particular commit.
 *
 * @param repositoryURL the URL of the repository
 * @param commit the id of the commit to check
 * @return a list of features affected by the commit
 */
public static ImmutableList<ObjectId> getAffectedFeatures(URL repositoryURL, ObjectId commit) {
    HttpURLConnection connection = null;
    Builder<ObjectId> listBuilder = new ImmutableList.Builder<ObjectId>();
    try {
        String expanded = repositoryURL.toString() + "/repo/affectedfeatures?commitId=" + commit.toString();
        connection = connect(expanded);
        // Get Response
        InputStream is = HttpUtils.getResponseStream(connection);
        try {
            BufferedReader rd = new BufferedReader(new InputStreamReader(is));
            String line = rd.readLine();
            while (line != null) {
                listBuilder.add(ObjectId.valueOf(line));
                line = rd.readLine();
            }
        } finally {
            consumeAndCloseStream(is);
        }
    } catch (Exception e) {
        Throwables.propagate(e);
    } finally {
        consumeErrStreamAndCloseConnection(connection);
    }
    return listBuilder.build();
}
Also used : HttpURLConnection(java.net.HttpURLConnection) InputStreamReader(java.io.InputStreamReader) ObjectId(org.locationtech.geogig.api.ObjectId) GZIPInputStream(java.util.zip.GZIPInputStream) FilterInputStream(java.io.FilterInputStream) CountingInputStream(com.google.common.io.CountingInputStream) InputStream(java.io.InputStream) Builder(com.google.common.collect.ImmutableList.Builder) BufferedReader(java.io.BufferedReader) XMLStreamException(javax.xml.stream.XMLStreamException) IOException(java.io.IOException)

Example 8 with Builder

use of com.google.common.collect.ImmutableList.Builder in project hive by apache.

the class HiveExceptRewriteRule method onMatch.

// ~ Methods ----------------------------------------------------------------
public void onMatch(RelOptRuleCall call) {
    final HiveExcept hiveExcept = call.rel(0);
    final RelOptCluster cluster = hiveExcept.getCluster();
    final RexBuilder rexBuilder = cluster.getRexBuilder();
    Builder<RelNode> bldr = new ImmutableList.Builder<RelNode>();
    // 1st level GB: create a GB (all keys + VCol + c) for each branch
    try {
        bldr.add(createFirstGB(hiveExcept.getInputs().get(0), true, cluster, rexBuilder));
        bldr.add(createFirstGB(hiveExcept.getInputs().get(1), false, cluster, rexBuilder));
    } catch (CalciteSemanticException e) {
        LOG.debug(e.toString());
        throw new RuntimeException(e);
    }
    // create a union above all the branches
    // the schema of union looks like this
    // all keys + VCol + c
    HiveRelNode union = new HiveUnion(cluster, TraitsUtil.getDefaultTraitSet(cluster), bldr.build());
    // 2nd level GB: create a GB (all keys + sum(c) as a + sum(VCol*c) as b) for
    // each branch
    final List<RexNode> gbChildProjLst = Lists.newArrayList();
    final List<Integer> groupSetPositions = Lists.newArrayList();
    int unionColumnSize = union.getRowType().getFieldList().size();
    for (int cInd = 0; cInd < unionColumnSize; cInd++) {
        gbChildProjLst.add(rexBuilder.makeInputRef(union, cInd));
        // the last 2 columns are VCol and c
        if (cInd < unionColumnSize - 2) {
            groupSetPositions.add(cInd);
        }
    }
    try {
        gbChildProjLst.add(multiply(rexBuilder.makeInputRef(union, unionColumnSize - 2), rexBuilder.makeInputRef(union, unionColumnSize - 1), cluster, rexBuilder));
    } catch (CalciteSemanticException e) {
        LOG.debug(e.toString());
        throw new RuntimeException(e);
    }
    RelNode gbInputRel = null;
    try {
        // Here we create a project for the following reasons:
        // (1) GBy only accepts arg as a position of the input, however, we need to sum on VCol*c
        // (2) This can better reuse the function createSingleArgAggCall.
        gbInputRel = HiveProject.create(union, gbChildProjLst, null);
    } catch (CalciteSemanticException e) {
        LOG.debug(e.toString());
        throw new RuntimeException(e);
    }
    // gbInputRel's schema is like this
    // all keys + VCol + c + VCol*c
    List<AggregateCall> aggregateCalls = Lists.newArrayList();
    RelDataType aggFnRetType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory());
    // sum(c)
    AggregateCall aggregateCall = HiveCalciteUtil.createSingleArgAggCall("sum", cluster, TypeInfoFactory.longTypeInfo, unionColumnSize - 1, aggFnRetType);
    aggregateCalls.add(aggregateCall);
    // sum(VCol*c)
    aggregateCall = HiveCalciteUtil.createSingleArgAggCall("sum", cluster, TypeInfoFactory.longTypeInfo, unionColumnSize, aggFnRetType);
    aggregateCalls.add(aggregateCall);
    final ImmutableBitSet groupSet = ImmutableBitSet.of(groupSetPositions);
    HiveRelNode aggregateRel = new HiveAggregate(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), gbInputRel, false, groupSet, null, aggregateCalls);
    if (!hiveExcept.all) {
        RelNode filterRel = null;
        try {
            filterRel = new HiveFilter(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), aggregateRel, makeFilterExprForExceptDistinct(aggregateRel, unionColumnSize, cluster, rexBuilder));
        } catch (CalciteSemanticException e) {
            LOG.debug(e.toString());
            throw new RuntimeException(e);
        }
        // finally add a project to project out the last 2 columns
        Set<Integer> projectOutColumnPositions = new HashSet<>();
        projectOutColumnPositions.add(filterRel.getRowType().getFieldList().size() - 2);
        projectOutColumnPositions.add(filterRel.getRowType().getFieldList().size() - 1);
        try {
            call.transformTo(HiveCalciteUtil.createProjectWithoutColumn(filterRel, projectOutColumnPositions));
        } catch (CalciteSemanticException e) {
            LOG.debug(e.toString());
            throw new RuntimeException(e);
        }
    } else {
        List<RexNode> originalInputRefs = Lists.transform(aggregateRel.getRowType().getFieldList(), new Function<RelDataTypeField, RexNode>() {

            @Override
            public RexNode apply(RelDataTypeField input) {
                return new RexInputRef(input.getIndex(), input.getType());
            }
        });
        List<RexNode> copyInputRefs = new ArrayList<>();
        try {
            copyInputRefs.add(makeExprForExceptAll(aggregateRel, unionColumnSize, cluster, rexBuilder));
        } catch (CalciteSemanticException e) {
            LOG.debug(e.toString());
            throw new RuntimeException(e);
        }
        for (int i = 0; i < originalInputRefs.size() - 2; i++) {
            copyInputRefs.add(originalInputRefs.get(i));
        }
        RelNode srcRel = null;
        try {
            srcRel = HiveProject.create(aggregateRel, copyInputRefs, null);
            HiveTableFunctionScan udtf = HiveCalciteUtil.createUDTFForSetOp(cluster, srcRel);
            // finally add a project to project out the 1st columns
            Set<Integer> projectOutColumnPositions = new HashSet<>();
            projectOutColumnPositions.add(0);
            call.transformTo(HiveCalciteUtil.createProjectWithoutColumn(udtf, projectOutColumnPositions));
        } catch (SemanticException e) {
            LOG.debug(e.toString());
            throw new RuntimeException(e);
        }
    }
}
Also used : RelOptCluster(org.apache.calcite.plan.RelOptCluster) ImmutableBitSet(org.apache.calcite.util.ImmutableBitSet) RelBuilder(org.apache.calcite.tools.RelBuilder) RexBuilder(org.apache.calcite.rex.RexBuilder) Builder(com.google.common.collect.ImmutableList.Builder) HiveTableFunctionScan(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableFunctionScan) HiveRelNode(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveRelNode) ArrayList(java.util.ArrayList) RelDataType(org.apache.calcite.rel.type.RelDataType) RexBuilder(org.apache.calcite.rex.RexBuilder) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException) HashSet(java.util.HashSet) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException) HiveExcept(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveExcept) HiveUnion(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveUnion) HiveFilter(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter) AggregateCall(org.apache.calcite.rel.core.AggregateCall) HiveAggregate(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveAggregate) RelDataTypeField(org.apache.calcite.rel.type.RelDataTypeField) HiveRelNode(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveRelNode) RelNode(org.apache.calcite.rel.RelNode) RexInputRef(org.apache.calcite.rex.RexInputRef) RexNode(org.apache.calcite.rex.RexNode)
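
The rewrite rests on a counting trick: the first-level aggregates tag every row with a constant column VCol and a per-key count c, and the second-level aggregate computes a = sum(c) and b = sum(VCol*c) per key. Assuming, purely for illustration, that rows from the first input are tagged 2 and rows from the second are tagged 1 (these constants are an assumption here, not read off the code above), a key kept by EXCEPT DISTINCT, i.e. one present only in the first input, is exactly a key with b == 2*a. A standalone sketch of that arithmetic, independent of Calcite:

/** Sketch of the tag-and-sum check behind the EXCEPT rewrite (assumed tags: first input = 2, second input = 1). */
public final class ExceptCountSketch {

    /** Returns true when a key with the given per-branch counts survives EXCEPT DISTINCT. */
    static boolean keptByExceptDistinct(long countInFirst, long countInSecond) {
        long a = countInFirst + countInSecond;          // sum(c)
        long b = 2 * countInFirst + 1 * countInSecond;  // sum(VCol * c)
        // Only keys present solely in the first input make b exactly twice a.
        return a > 0 && b == 2 * a;
    }

    public static void main(String[] args) {
        System.out.println(keptByExceptDistinct(3, 0)); // true: only in the first input
        System.out.println(keptByExceptDistinct(3, 1)); // false: present in both inputs
        System.out.println(keptByExceptDistinct(0, 2)); // false: only in the second input
    }
}

In the rule itself this check is expressed as a RexNode filter built by makeFilterExprForExceptDistinct over the output of the second-level aggregate.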

Example 9 with Builder

use of com.google.common.collect.ImmutableList.Builder in project hive by apache.

the class HiveTableScan method buildColIndxsFrmReloptHT.

private static Pair<ImmutableList<Integer>, ImmutableSet<Integer>> buildColIndxsFrmReloptHT(RelOptHiveTable relOptHTable, RelDataType scanRowType) {
    RelDataType relOptHtRowtype = relOptHTable.getRowType();
    ImmutableList<Integer> neededColIndxsFrmReloptHT;
    Builder<Integer> neededColIndxsFrmReloptHTBldr = new ImmutableList.Builder<Integer>();
    ImmutableSet<Integer> viurtualOrPartColIndxsInTS;
    ImmutableSet.Builder<Integer> viurtualOrPartColIndxsInTSBldr = new ImmutableSet.Builder<Integer>();
    Map<String, Integer> colNameToPosInReloptHT = HiveCalciteUtil.getRowColNameIndxMap(relOptHtRowtype.getFieldList());
    List<String> colNamesInScanRowType = scanRowType.getFieldNames();
    int partOrVirtualColStartPosInrelOptHtRowtype = relOptHTable.getNonPartColumns().size();
    int tmp;
    for (int i = 0; i < colNamesInScanRowType.size(); i++) {
        tmp = colNameToPosInReloptHT.get(colNamesInScanRowType.get(i));
        neededColIndxsFrmReloptHTBldr.add(tmp);
        if (tmp >= partOrVirtualColStartPosInrelOptHtRowtype) {
            viurtualOrPartColIndxsInTSBldr.add(i);
        }
    }
    neededColIndxsFrmReloptHT = neededColIndxsFrmReloptHTBldr.build();
    viurtualOrPartColIndxsInTS = viurtualOrPartColIndxsInTSBldr.build();
    return new Pair<ImmutableList<Integer>, ImmutableSet<Integer>>(neededColIndxsFrmReloptHT, viurtualOrPartColIndxsInTS);
}
Also used : RelBuilder(org.apache.calcite.tools.RelBuilder) RexBuilder(org.apache.calcite.rex.RexBuilder) Builder(com.google.common.collect.ImmutableList.Builder) RelDataType(org.apache.calcite.rel.type.RelDataType) ImmutableSet(com.google.common.collect.ImmutableSet) Pair(org.apache.calcite.util.Pair)
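
The method builds an ImmutableList and an ImmutableSet side by side in a single pass over the scan's column names. That shape can be reduced to a small self-contained sketch; the schema, names, and threshold below are made up for illustration and are not taken from Hive:

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.util.List;

public final class IndexPartitionSketch {

    /**
     * Maps each needed name to its position in the full schema and, in the same pass,
     * records which needed columns fall at or beyond the threshold (standing in for the
     * partition/virtual column boundary). Assumes every needed name exists in the schema.
     */
    static void collect(List<String> schema, List<String> needed, int threshold) {
        ImmutableList.Builder<Integer> positions = ImmutableList.builder();
        ImmutableSet.Builder<Integer> beyondThreshold = ImmutableSet.builder();
        for (int i = 0; i < needed.size(); i++) {
            int pos = schema.indexOf(needed.get(i));
            positions.add(pos);
            if (pos >= threshold) {
                beyondThreshold.add(i);
            }
        }
        System.out.println(positions.build());        // [0, 2, 3]
        System.out.println(beyondThreshold.build());  // [1, 2]
    }

    public static void main(String[] args) {
        collect(List.of("id", "name", "ds", "vcol"), List.of("id", "ds", "vcol"), 2);
    }
}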

Example 10 with Builder

use of com.google.common.collect.ImmutableList.Builder in project ORCID-Source by ORCID.

the class IdentifierTypeManagerImpl method queryByPrefix.

/**
 * Queries the identifier name and description fields for words that START WITH the query.
 * Returns an immutable list of matching types.
 * A null locale will result in Locale.ENGLISH.
 */
@Override
@Cacheable("identifier-types-map-prefix")
public List<IdentifierType> queryByPrefix(String query, Locale loc) {
    Map<String, IdentifierType> results = new HashMap<String, IdentifierType>();
    Map<String, IdentifierType> types = fetchIdentifierTypesByAPITypeName(loc);
    //stick them in a trie so we can do a deep prefix search
    PatriciaTrie<Set<IdentifierType>> trie = new PatriciaTrie<Set<IdentifierType>>();
    for (String type : types.keySet()) {
        IdentifierType t = types.get(type);
        if (!trie.containsKey(t.getName().toLowerCase()))
            trie.put(t.getName().toLowerCase(), new HashSet<IdentifierType>());
        trie.get(t.getName().toLowerCase()).add(t);
        for (String s : t.getDescription().toLowerCase().split(" ")) {
            if (!trie.containsKey(s))
                trie.put(s, new HashSet<IdentifierType>());
            trie.get(s).add(t);
        }
    }
    //dedupe and sort
    SortedMap<String, Set<IdentifierType>> sorted = trie.prefixMap(query.toLowerCase());
    for (Set<IdentifierType> set : sorted.values()) {
        for (IdentifierType t : set) {
            if (!results.containsKey(t.getDescription().toLowerCase()))
                results.put(t.getDescription().toLowerCase(), t);
        }
    }
    //put anything that starts with query at the top of the list.
    Builder<IdentifierType> builder = new Builder<IdentifierType>();
    for (IdentifierType t : results.values()) {
        if (t.getDescription().toLowerCase().startsWith(query.toLowerCase())) {
            builder.add(t);
        }
    }
    for (IdentifierType t : results.values()) {
        if (!t.getDescription().toLowerCase().startsWith(query.toLowerCase())) {
            builder.add(t);
        }
    }
    return builder.build();
}
Also used : HashSet(java.util.HashSet) Set(java.util.Set) HashMap(java.util.HashMap) PatriciaTrie(org.apache.commons.collections4.trie.PatriciaTrie) Builder(com.google.common.collect.ImmutableList.Builder) IdentifierType(org.orcid.pojo.IdentifierType) HashSet(java.util.HashSet) Cacheable(org.springframework.cache.annotation.Cacheable)
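
The two-pass ordering at the end, descriptions that start with the query first and everything else after, is the part that uses the ImmutableList.Builder. A minimal sketch of that ordering over plain strings (hypothetical data, not ORCID's IdentifierType):

import com.google.common.collect.ImmutableList;
import java.util.List;

public final class PrefixFirstOrderingSketch {

    /** Returns the candidates with those starting with the query placed first, preserving relative order. */
    static ImmutableList<String> prefixFirst(List<String> candidates, String query) {
        String q = query.toLowerCase();
        ImmutableList.Builder<String> builder = ImmutableList.builder();
        for (String c : candidates) {
            if (c.toLowerCase().startsWith(q)) {
                builder.add(c);
            }
        }
        for (String c : candidates) {
            if (!c.toLowerCase().startsWith(q)) {
                builder.add(c);
            }
        }
        return builder.build();
    }

    public static void main(String[] args) {
        // Prints [DOI, DOI-like id, ISBN, arXiv id]
        System.out.println(prefixFirst(List.of("ISBN", "DOI", "arXiv id", "DOI-like id"), "do"));
    }
}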

Aggregations

Builder (com.google.common.collect.ImmutableList.Builder) 10
ImmutableList (com.google.common.collect.ImmutableList) 3
HashSet (java.util.HashSet) 3
RelDataType (org.apache.calcite.rel.type.RelDataType) 3
RexBuilder (org.apache.calcite.rex.RexBuilder) 3
RelBuilder (org.apache.calcite.tools.RelBuilder) 3
CountingInputStream (com.google.common.io.CountingInputStream) 2
BufferedReader (java.io.BufferedReader) 2
FilterInputStream (java.io.FilterInputStream) 2
IOException (java.io.IOException) 2
InputStream (java.io.InputStream) 2
InputStreamReader (java.io.InputStreamReader) 2
HttpURLConnection (java.net.HttpURLConnection) 2
ArrayList (java.util.ArrayList) 2
GZIPInputStream (java.util.zip.GZIPInputStream) 2
XMLStreamException (javax.xml.stream.XMLStreamException) 2
RelOptCluster (org.apache.calcite.plan.RelOptCluster) 2
RelNode (org.apache.calcite.rel.RelNode) 2
AggregateCall (org.apache.calcite.rel.core.AggregateCall) 2
RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField) 2