use of com.google.common.collect.ImmutableList.Builder in project GeoGig by boundlessgeo.
the class HttpUtils method getParents.
/**
 * Gets the parents of the specified commit from the remote repository.
 *
 * @param repositoryURL the URL of the repository
 * @param commit the id of the commit whose parents to retrieve
 * @return a list of parent ids for the commit
 */
public static ImmutableList<ObjectId> getParents(URL repositoryURL, ObjectId commit) {
    HttpURLConnection connection = null;
    Builder<ObjectId> listBuilder = new ImmutableList.Builder<ObjectId>();
    try {
        String expanded = repositoryURL.toString() + "/repo/getparents?commitId=" + commit.toString();
        connection = connect(expanded);
        // Get Response
        InputStream is = HttpUtils.getResponseStream(connection);
        try {
            BufferedReader rd = new BufferedReader(new InputStreamReader(is));
            String line = rd.readLine();
            while (line != null) {
                listBuilder.add(ObjectId.valueOf(line));
                line = rd.readLine();
            }
        } finally {
            consumeAndCloseStream(is);
        }
    } catch (Exception e) {
        Throwables.propagate(e);
    } finally {
        consumeErrStreamAndCloseConnection(connection);
    }
    return listBuilder.build();
}
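A hedged usage sketch (not part of the GeoGig sources): the repository URL, the commit id, and the surrounding class are made up for illustration, and the imports for ObjectId and HttpUtils are assumed to match whatever package they live in for the GeoGig version at hand.

import java.net.URL;

import com.google.common.collect.ImmutableList;

// ObjectId and HttpUtils imports omitted; their packages differ between GeoGig versions.
public class GetParentsExample {
    public static void main(String[] args) throws Exception {
        URL repositoryURL = new URL("http://localhost:8182/repos/myrepo"); // hypothetical remote
        ObjectId commit = ObjectId.valueOf("abc1230000000000000000000000000000000000"); // hypothetical id

        ImmutableList<ObjectId> parents = HttpUtils.getParents(repositoryURL, commit);
        if (parents.isEmpty()) {
            System.out.println(commit + " is a root commit (no parents)");
        } else {
            System.out.println(commit + " has parents: " + parents);
        }
    }
}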
use of com.google.common.collect.ImmutableList.Builder in project GeoGig by boundlessgeo.
the class HttpUtils method getAffectedFeatures.
/**
 * Retrieves a list of features that were modified or deleted by a particular commit.
 *
 * @param repositoryURL the URL of the repository
 * @param commit the id of the commit to check
 * @return a list of features affected by the commit
 */
public static ImmutableList<ObjectId> getAffectedFeatures(URL repositoryURL, ObjectId commit) {
    HttpURLConnection connection = null;
    Builder<ObjectId> listBuilder = new ImmutableList.Builder<ObjectId>();
    try {
        String expanded = repositoryURL.toString() + "/repo/affectedfeatures?commitId=" + commit.toString();
        connection = connect(expanded);
        // Get Response
        InputStream is = HttpUtils.getResponseStream(connection);
        try {
            BufferedReader rd = new BufferedReader(new InputStreamReader(is));
            String line = rd.readLine();
            while (line != null) {
                listBuilder.add(ObjectId.valueOf(line));
                line = rd.readLine();
            }
        } finally {
            consumeAndCloseStream(is);
        }
    } catch (Exception e) {
        Throwables.propagate(e);
    } finally {
        consumeErrStreamAndCloseConnection(connection);
    }
    return listBuilder.build();
}
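getAffectedFeatures pairs naturally with getParents above. As a hedged sketch (again not from the GeoGig sources, and assuming the same imports as the previous sketch), the helper below walks up to maxDepth commits along the first-parent chain and unions the features each commit touched; the method name and the depth limit are invented for illustration.

// Sketch only: collects the ids of features touched by the last maxDepth commits.
static Set<ObjectId> featuresTouchedRecently(URL repositoryURL, ObjectId tip, int maxDepth) {
    Set<ObjectId> touched = new HashSet<ObjectId>();
    ObjectId current = tip;
    for (int depth = 0; depth < maxDepth; depth++) {
        touched.addAll(HttpUtils.getAffectedFeatures(repositoryURL, current));
        ImmutableList<ObjectId> parents = HttpUtils.getParents(repositoryURL, current);
        if (parents.isEmpty()) {
            break; // reached a root commit
        }
        current = parents.get(0); // follow the first parent only
    }
    return touched;
}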
use of com.google.common.collect.ImmutableList.Builder in project hive by apache.
the class HiveExceptRewriteRule method onMatch.
// ~ Methods ----------------------------------------------------------------
public void onMatch(RelOptRuleCall call) {
    final HiveExcept hiveExcept = call.rel(0);
    final RelOptCluster cluster = hiveExcept.getCluster();
    final RexBuilder rexBuilder = cluster.getRexBuilder();
    Builder<RelNode> bldr = new ImmutableList.Builder<RelNode>();
    // branch
    try {
        bldr.add(createFirstGB(hiveExcept.getInputs().get(0), true, cluster, rexBuilder));
        bldr.add(createFirstGB(hiveExcept.getInputs().get(1), false, cluster, rexBuilder));
    } catch (CalciteSemanticException e) {
        LOG.debug(e.toString());
        throw new RuntimeException(e);
    }
    // create a union above all the branches
    // the schema of union looks like this
    // all keys + VCol + c
    HiveRelNode union = new HiveUnion(cluster, TraitsUtil.getDefaultTraitSet(cluster), bldr.build());
    // 2nd level GB: create a GB (all keys + sum(c) as a + sum(VCol*c) as b) for
    // each branch
    final List<RexNode> gbChildProjLst = Lists.newArrayList();
    final List<Integer> groupSetPositions = Lists.newArrayList();
    int unionColumnSize = union.getRowType().getFieldList().size();
    for (int cInd = 0; cInd < unionColumnSize; cInd++) {
        gbChildProjLst.add(rexBuilder.makeInputRef(union, cInd));
        // the last 2 columns are VCol and c
        if (cInd < unionColumnSize - 2) {
            groupSetPositions.add(cInd);
        }
    }
    try {
        gbChildProjLst.add(multiply(rexBuilder.makeInputRef(union, unionColumnSize - 2), rexBuilder.makeInputRef(union, unionColumnSize - 1), cluster, rexBuilder));
    } catch (CalciteSemanticException e) {
        LOG.debug(e.toString());
        throw new RuntimeException(e);
    }
    RelNode gbInputRel = null;
    try {
        // Here we create a project for the following reasons:
        // (1) GBy only accepts arg as a position of the input, however, we need to sum on VCol*c
        // (2) This can better reuse the function createSingleArgAggCall.
        gbInputRel = HiveProject.create(union, gbChildProjLst, null);
    } catch (CalciteSemanticException e) {
        LOG.debug(e.toString());
        throw new RuntimeException(e);
    }
    // gbInputRel's schema is like this
    // all keys + VCol + c + VCol*c
    List<AggregateCall> aggregateCalls = Lists.newArrayList();
    RelDataType aggFnRetType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory());
    // sum(c)
    AggregateCall aggregateCall = HiveCalciteUtil.createSingleArgAggCall("sum", cluster, TypeInfoFactory.longTypeInfo, unionColumnSize - 1, aggFnRetType);
    aggregateCalls.add(aggregateCall);
    // sum(VCol*c)
    aggregateCall = HiveCalciteUtil.createSingleArgAggCall("sum", cluster, TypeInfoFactory.longTypeInfo, unionColumnSize, aggFnRetType);
    aggregateCalls.add(aggregateCall);
    final ImmutableBitSet groupSet = ImmutableBitSet.of(groupSetPositions);
    HiveRelNode aggregateRel = new HiveAggregate(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), gbInputRel, false, groupSet, null, aggregateCalls);
    if (!hiveExcept.all) {
        RelNode filterRel = null;
        try {
            filterRel = new HiveFilter(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), aggregateRel, makeFilterExprForExceptDistinct(aggregateRel, unionColumnSize, cluster, rexBuilder));
        } catch (CalciteSemanticException e) {
            LOG.debug(e.toString());
            throw new RuntimeException(e);
        }
        // finally add a project to project out the last 2 columns
        Set<Integer> projectOutColumnPositions = new HashSet<>();
        projectOutColumnPositions.add(filterRel.getRowType().getFieldList().size() - 2);
        projectOutColumnPositions.add(filterRel.getRowType().getFieldList().size() - 1);
        try {
            call.transformTo(HiveCalciteUtil.createProjectWithoutColumn(filterRel, projectOutColumnPositions));
        } catch (CalciteSemanticException e) {
            LOG.debug(e.toString());
            throw new RuntimeException(e);
        }
    } else {
        List<RexNode> originalInputRefs = Lists.transform(aggregateRel.getRowType().getFieldList(), new Function<RelDataTypeField, RexNode>() {

            @Override
            public RexNode apply(RelDataTypeField input) {
                return new RexInputRef(input.getIndex(), input.getType());
            }
        });
        List<RexNode> copyInputRefs = new ArrayList<>();
        try {
            copyInputRefs.add(makeExprForExceptAll(aggregateRel, unionColumnSize, cluster, rexBuilder));
        } catch (CalciteSemanticException e) {
            LOG.debug(e.toString());
            throw new RuntimeException(e);
        }
        for (int i = 0; i < originalInputRefs.size() - 2; i++) {
            copyInputRefs.add(originalInputRefs.get(i));
        }
        RelNode srcRel = null;
        try {
            srcRel = HiveProject.create(aggregateRel, copyInputRefs, null);
            HiveTableFunctionScan udtf = HiveCalciteUtil.createUDTFForSetOp(cluster, srcRel);
            // finally add a project to project out the 1st columns
            Set<Integer> projectOutColumnPositions = new HashSet<>();
            projectOutColumnPositions.add(0);
            call.transformTo(HiveCalciteUtil.createProjectWithoutColumn(udtf, projectOutColumnPositions));
        } catch (SemanticException e) {
            LOG.debug(e.toString());
            throw new RuntimeException(e);
        }
    }
}
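The rewrite above turns EXCEPT into a union of the two tagged (VCol) inputs, a second-level group-by that computes sum(c) and sum(VCol*c), and then either a filter (EXCEPT DISTINCT) or a UDTF (EXCEPT ALL). For intuition only, the self-contained sketch below states the result the rewritten plan has to produce, computed with plain Java maps; it is not the Hive operator pipeline and does not reproduce its exact virtual-column encoding.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Reference semantics for EXCEPT / EXCEPT ALL on a single key column, for comparison
// with the union + group-by plan built in onMatch above.
public class ExceptSemanticsSketch {

    static <K> List<K> exceptSemantics(List<K> left, List<K> right, boolean all) {
        Map<K, Long> leftCounts = new HashMap<>();
        Map<K, Long> rightCounts = new HashMap<>();
        for (K k : left) {
            leftCounts.merge(k, 1L, Long::sum);
        }
        for (K k : right) {
            rightCounts.merge(k, 1L, Long::sum);
        }
        List<K> out = new ArrayList<>();
        for (Map.Entry<K, Long> e : leftCounts.entrySet()) {
            long l = e.getValue();
            long r = rightCounts.getOrDefault(e.getKey(), 0L);
            if (all) {
                // EXCEPT ALL keeps each key max(l - r, 0) times
                for (long i = 0; i < l - r; i++) {
                    out.add(e.getKey());
                }
            } else if (r == 0) {
                // EXCEPT DISTINCT keeps keys present on the left and absent on the right
                out.add(e.getKey());
            }
        }
        return out;
    }
}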
use of com.google.common.collect.ImmutableList.Builder in project hive by apache.
the class HiveTableScan method buildColIndxsFrmReloptHT.
private static Pair<ImmutableList<Integer>, ImmutableSet<Integer>> buildColIndxsFrmReloptHT(RelOptHiveTable relOptHTable, RelDataType scanRowType) {
    RelDataType relOptHtRowtype = relOptHTable.getRowType();
    ImmutableList<Integer> neededColIndxsFrmReloptHT;
    Builder<Integer> neededColIndxsFrmReloptHTBldr = new ImmutableList.Builder<Integer>();
    ImmutableSet<Integer> viurtualOrPartColIndxsInTS;
    ImmutableSet.Builder<Integer> viurtualOrPartColIndxsInTSBldr = new ImmutableSet.Builder<Integer>();
    Map<String, Integer> colNameToPosInReloptHT = HiveCalciteUtil.getRowColNameIndxMap(relOptHtRowtype.getFieldList());
    List<String> colNamesInScanRowType = scanRowType.getFieldNames();
    int partOrVirtualColStartPosInrelOptHtRowtype = relOptHTable.getNonPartColumns().size();
    int tmp;
    for (int i = 0; i < colNamesInScanRowType.size(); i++) {
        tmp = colNameToPosInReloptHT.get(colNamesInScanRowType.get(i));
        neededColIndxsFrmReloptHTBldr.add(tmp);
        if (tmp >= partOrVirtualColStartPosInrelOptHtRowtype) {
            viurtualOrPartColIndxsInTSBldr.add(i);
        }
    }
    neededColIndxsFrmReloptHT = neededColIndxsFrmReloptHTBldr.build();
    viurtualOrPartColIndxsInTS = viurtualOrPartColIndxsInTSBldr.build();
    return new Pair<ImmutableList<Integer>, ImmutableSet<Integer>>(neededColIndxsFrmReloptHT, viurtualOrPartColIndxsInTS);
}
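Stripped of the Calcite types, buildColIndxsFrmReloptHT makes one pass over the scan columns and builds two lookups: the position of each scanned column in the table row type, and the set of scan positions that fall in the partition/virtual-column range. The self-contained sketch below reproduces that pattern with plain strings; the column names and the nonPartColumnCount value are made up for illustration.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;

// Sketch of the index-mapping pattern above: "tableColumns" stands in for the
// RelOptHiveTable row type; columns at or after nonPartColumnCount play the role
// of partition/virtual columns.
public class ColumnIndexExample {
    public static void main(String[] args) {
        List<String> tableColumns = Arrays.asList("id", "name", "ds", "BLOCK__OFFSET__INSIDE__FILE");
        int nonPartColumnCount = 2; // "ds" and the virtual column come after the regular columns
        List<String> scanColumns = Arrays.asList("name", "ds");

        Map<String, Integer> colNameToPos = new HashMap<>();
        for (int i = 0; i < tableColumns.size(); i++) {
            colNameToPos.put(tableColumns.get(i), i);
        }

        ImmutableList.Builder<Integer> neededColIndxs = new ImmutableList.Builder<>();
        ImmutableSet.Builder<Integer> virtualOrPartColIndxs = new ImmutableSet.Builder<>();
        for (int i = 0; i < scanColumns.size(); i++) {
            int pos = colNameToPos.get(scanColumns.get(i));
            neededColIndxs.add(pos);
            if (pos >= nonPartColumnCount) {
                virtualOrPartColIndxs.add(i); // index into the scan row type, as in the method above
            }
        }
        System.out.println("needed: " + neededColIndxs.build()); // [1, 2]
        System.out.println("virtual/partition: " + virtualOrPartColIndxs.build()); // [1]
    }
}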
use of com.google.common.collect.ImmutableList.Builder in project ORCID-Source by ORCID.
the class IdentifierTypeManagerImpl method queryByPrefix.
/**
 * Queries the identifier name and description fields for words that START WITH query.
 * Returns an immutable list of matching types.
 * Null locale will result in Locale.ENGLISH
 *
 */
@Override
@Cacheable("identifier-types-map-prefix")
public List<IdentifierType> queryByPrefix(String query, Locale loc) {
    Map<String, IdentifierType> results = new HashMap<String, IdentifierType>();
    Map<String, IdentifierType> types = fetchIdentifierTypesByAPITypeName(loc);
    // stick them in a trie so we can do a deep prefix search
    PatriciaTrie<Set<IdentifierType>> trie = new PatriciaTrie<Set<IdentifierType>>();
    for (String type : types.keySet()) {
        IdentifierType t = types.get(type);
        if (!trie.containsKey(t.getName().toLowerCase()))
            trie.put(t.getName().toLowerCase(), new HashSet<IdentifierType>());
        trie.get(t.getName().toLowerCase()).add(t);
        for (String s : t.getDescription().toLowerCase().split(" ")) {
            if (!trie.containsKey(s))
                trie.put(s, new HashSet<IdentifierType>());
            trie.get(s).add(t);
        }
    }
    // dedupe and sort
    SortedMap<String, Set<IdentifierType>> sorted = trie.prefixMap(query.toLowerCase());
    for (Set<IdentifierType> set : sorted.values()) {
        for (IdentifierType t : set) {
            if (!results.containsKey(t.getDescription().toLowerCase()))
                results.put(t.getDescription().toLowerCase(), t);
        }
    }
    // put anything that starts with query at the top of the list.
    Builder<IdentifierType> builder = new Builder<IdentifierType>();
    for (IdentifierType t : results.values()) {
        if (t.getDescription().toLowerCase().startsWith(query.toLowerCase())) {
            builder.add(t);
        }
    }
    for (IdentifierType t : results.values()) {
        if (!t.getDescription().toLowerCase().startsWith(query.toLowerCase())) {
            builder.add(t);
        }
    }
    return builder.build();
}
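A hedged usage sketch: identifierTypeManager stands for an injected instance of this manager, and the query string is made up. Types whose description starts with the query come back ahead of types that merely contain a matching word in their name or description.

// Sketch only: identifierTypeManager would normally be injected by Spring.
List<IdentifierType> matches = identifierTypeManager.queryByPrefix("do", Locale.ENGLISH);
for (IdentifierType t : matches) {
    System.out.println(t.getName() + " - " + t.getDescription());
}
// A type whose description starts with "do" (if any exists) is listed before
// types that only contain a word starting with "do" in the name or description.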