Use of com.google.common.collect.ImmutableList.builder in project ORCID-Source by ORCID.
The class IdentifierTypeManagerImpl, method queryByPrefix.
/**
 * Queries the identifier name and description fields for words that start with the query.
 * Returns an immutable list of matching types.
 * A null locale defaults to Locale.ENGLISH.
 */
@Override
@Cacheable("identifier-types-map-prefix")
public List<IdentifierType> queryByPrefix(String query, Locale loc) {
    Map<String, IdentifierType> results = new HashMap<String, IdentifierType>();
    Map<String, IdentifierType> types = fetchIdentifierTypesByAPITypeName(loc);
    // stick them in a trie so we can do a deep prefix search
    PatriciaTrie<Set<IdentifierType>> trie = new PatriciaTrie<Set<IdentifierType>>();
    for (String type : types.keySet()) {
        IdentifierType t = types.get(type);
        if (!trie.containsKey(t.getName().toLowerCase()))
            trie.put(t.getName().toLowerCase(), new HashSet<IdentifierType>());
        trie.get(t.getName().toLowerCase()).add(t);
        for (String s : t.getDescription().toLowerCase().split(" ")) {
            if (!trie.containsKey(s))
                trie.put(s, new HashSet<IdentifierType>());
            trie.get(s).add(t);
        }
    }
    // dedupe and sort
    SortedMap<String, Set<IdentifierType>> sorted = trie.prefixMap(query.toLowerCase());
    for (Set<IdentifierType> set : sorted.values()) {
        for (IdentifierType t : set) {
            if (!results.containsKey(t.getDescription().toLowerCase()))
                results.put(t.getDescription().toLowerCase(), t);
        }
    }
    // put anything that starts with the query at the top of the list
    Builder<IdentifierType> builder = new Builder<IdentifierType>();
    for (IdentifierType t : results.values()) {
        if (t.getDescription().toLowerCase().startsWith(query.toLowerCase())) {
            builder.add(t);
        }
    }
    for (IdentifierType t : results.values()) {
        if (!t.getDescription().toLowerCase().startsWith(query.toLowerCase())) {
            builder.add(t);
        }
    }
    return builder.build();
}
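The key builder pattern here is the two-pass fill: entries whose description starts with the query are added to the ImmutableList.Builder first, everything else is appended afterwards, and build() freezes that ordering. Below is a minimal, self-contained sketch of the same pattern using plain strings; the PrefixFirstOrdering class and its sample data are invented for illustration and are not part of ORCID-Source.

import com.google.common.collect.ImmutableList;
import java.util.Arrays;
import java.util.List;

public class PrefixFirstOrdering {

    static List<String> prefixFirst(List<String> descriptions, String query) {
        String q = query.toLowerCase();
        ImmutableList.Builder<String> builder = ImmutableList.builder();
        // first pass: prefix matches go to the top of the list
        for (String d : descriptions) {
            if (d.toLowerCase().startsWith(q)) {
                builder.add(d);
            }
        }
        // second pass: everything else keeps its relative order below the prefix matches
        for (String d : descriptions) {
            if (!d.toLowerCase().startsWith(q)) {
                builder.add(d);
            }
        }
        return builder.build();
    }

    public static void main(String[] args) {
        List<String> types = Arrays.asList("Digital Object Identifier", "ISBN", "DOI handle", "ArXiv ID");
        // prints [DOI handle, Digital Object Identifier, ISBN, ArXiv ID] for query "do"
        System.out.println(prefixFirst(types, "do"));
    }
}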
Use of com.google.common.collect.ImmutableList.builder in project hive by apache.
The class HiveExceptRewriteRule, method onMatch.
// ~ Methods ----------------------------------------------------------------
public void onMatch(RelOptRuleCall call) {
    final HiveExcept hiveExcept = call.rel(0);
    final RelOptCluster cluster = hiveExcept.getCluster();
    final RexBuilder rexBuilder = cluster.getRexBuilder();
    Builder<RelNode> bldr = new ImmutableList.Builder<RelNode>();
    // 1st level GB for each branch; its output schema is all keys + VCol + c
    try {
        bldr.add(createFirstGB(hiveExcept.getInputs().get(0), true, cluster, rexBuilder));
        bldr.add(createFirstGB(hiveExcept.getInputs().get(1), false, cluster, rexBuilder));
    } catch (CalciteSemanticException e) {
        LOG.debug(e.toString());
        throw new RuntimeException(e);
    }
    // create a union above all the branches;
    // the schema of the union looks like this:
    // all keys + VCol + c
    HiveRelNode union = new HiveUnion(cluster, TraitsUtil.getDefaultTraitSet(cluster), bldr.build());
    // 2nd level GB: create a GB (all keys + sum(c) as a + sum(VCol*c) as b) for
    // each branch
    final List<RexNode> gbChildProjLst = Lists.newArrayList();
    final List<Integer> groupSetPositions = Lists.newArrayList();
    int unionColumnSize = union.getRowType().getFieldList().size();
    for (int cInd = 0; cInd < unionColumnSize; cInd++) {
        gbChildProjLst.add(rexBuilder.makeInputRef(union, cInd));
        // the last 2 columns are VCol and c
        if (cInd < unionColumnSize - 2) {
            groupSetPositions.add(cInd);
        }
    }
    try {
        gbChildProjLst.add(multiply(rexBuilder.makeInputRef(union, unionColumnSize - 2),
                rexBuilder.makeInputRef(union, unionColumnSize - 1), cluster, rexBuilder));
    } catch (CalciteSemanticException e) {
        LOG.debug(e.toString());
        throw new RuntimeException(e);
    }
    RelNode gbInputRel = null;
    try {
        // Here we create a project for the following reasons:
        // (1) GBy only accepts an arg as a position of the input, but we need to sum on VCol*c
        // (2) This can better reuse the function createSingleArgAggCall.
        gbInputRel = HiveProject.create(union, gbChildProjLst, null);
    } catch (CalciteSemanticException e) {
        LOG.debug(e.toString());
        throw new RuntimeException(e);
    }
    // gbInputRel's schema is:
    // all keys + VCol + c + VCol*c
    List<AggregateCall> aggregateCalls = Lists.newArrayList();
    RelDataType aggFnRetType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory());
    // sum(c)
    AggregateCall aggregateCall = HiveCalciteUtil.createSingleArgAggCall("sum", cluster,
            TypeInfoFactory.longTypeInfo, unionColumnSize - 1, aggFnRetType);
    aggregateCalls.add(aggregateCall);
    // sum(VCol*c)
    aggregateCall = HiveCalciteUtil.createSingleArgAggCall("sum", cluster,
            TypeInfoFactory.longTypeInfo, unionColumnSize, aggFnRetType);
    aggregateCalls.add(aggregateCall);
    final ImmutableBitSet groupSet = ImmutableBitSet.of(groupSetPositions);
    HiveRelNode aggregateRel = new HiveAggregate(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION),
            gbInputRel, groupSet, null, aggregateCalls);
    if (!hiveExcept.all) {
        RelNode filterRel = null;
        try {
            filterRel = new HiveFilter(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), aggregateRel,
                    makeFilterExprForExceptDistinct(aggregateRel, unionColumnSize, cluster, rexBuilder));
        } catch (CalciteSemanticException e) {
            LOG.debug(e.toString());
            throw new RuntimeException(e);
        }
        // finally add a project to project out the last 2 columns
        Set<Integer> projectOutColumnPositions = new HashSet<>();
        projectOutColumnPositions.add(filterRel.getRowType().getFieldList().size() - 2);
        projectOutColumnPositions.add(filterRel.getRowType().getFieldList().size() - 1);
        try {
            call.transformTo(HiveCalciteUtil.createProjectWithoutColumn(filterRel, projectOutColumnPositions));
        } catch (CalciteSemanticException e) {
            LOG.debug(e.toString());
            throw new RuntimeException(e);
        }
    } else {
        List<RexNode> originalInputRefs = Lists.transform(aggregateRel.getRowType().getFieldList(),
                new Function<RelDataTypeField, RexNode>() {
                    @Override
                    public RexNode apply(RelDataTypeField input) {
                        return new RexInputRef(input.getIndex(), input.getType());
                    }
                });
        List<RexNode> copyInputRefs = new ArrayList<>();
        try {
            copyInputRefs.add(makeExprForExceptAll(aggregateRel, unionColumnSize, cluster, rexBuilder));
        } catch (CalciteSemanticException e) {
            LOG.debug(e.toString());
            throw new RuntimeException(e);
        }
        for (int i = 0; i < originalInputRefs.size() - 2; i++) {
            copyInputRefs.add(originalInputRefs.get(i));
        }
        RelNode srcRel = null;
        try {
            srcRel = HiveProject.create(aggregateRel, copyInputRefs, null);
            HiveTableFunctionScan udtf = HiveCalciteUtil.createUDTFForSetOp(cluster, srcRel);
            // finally add a project to project out the first column
            Set<Integer> projectOutColumnPositions = new HashSet<>();
            projectOutColumnPositions.add(0);
            call.transformTo(HiveCalciteUtil.createProjectWithoutColumn(udtf, projectOutColumnPositions));
        } catch (SemanticException e) {
            LOG.debug(e.toString());
            throw new RuntimeException(e);
        }
    }
}
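For readers without a Calcite/Hive environment, the following standalone sketch shows the builder usage from the top of this method in isolation: the rewritten branches are accumulated in an ImmutableList.Builder and the frozen list is handed to a node that expects immutable inputs. The Node and Union types and the rewriteBranch helper are hypothetical stand-ins, not Hive classes.

import com.google.common.collect.ImmutableList;

public class UnionAssembly {

    interface Node { }

    // hypothetical stand-in for HiveUnion: takes an immutable snapshot of its inputs
    static final class Union implements Node {
        final ImmutableList<Node> inputs;
        Union(ImmutableList<Node> inputs) {
            this.inputs = inputs;
        }
    }

    static Node rewriteBranch(Node input, boolean leftBranch) {
        // placeholder for createFirstGB-style per-branch rewriting
        return input;
    }

    public static void main(String[] args) {
        Node left = new Node() { };
        Node right = new Node() { };
        ImmutableList.Builder<Node> bldr = ImmutableList.builder();
        bldr.add(rewriteBranch(left, true));
        bldr.add(rewriteBranch(right, false));
        // build() freezes the branch list; the union's inputs cannot be mutated afterwards
        Union union = new Union(bldr.build());
        System.out.println("union has " + union.inputs.size() + " inputs");
    }
}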
Use of com.google.common.collect.ImmutableList.builder in project hive by apache.
The class HiveTableScan, method buildColIndxsFrmReloptHT.
private static Triple<ImmutableList<Integer>, ImmutableSet<Integer>, ImmutableSet<Integer>> buildColIndxsFrmReloptHT(
        RelOptHiveTable relOptHTable, RelDataType scanRowType) {
    RelDataType relOptHtRowtype = relOptHTable.getRowType();
    Builder<Integer> neededColIndxsFrmReloptHTBldr = new ImmutableList.Builder<Integer>();
    ImmutableSet.Builder<Integer> virtualOrPartColIndxsInTSBldr = new ImmutableSet.Builder<Integer>();
    ImmutableSet.Builder<Integer> virtualColIndxsInTSBldr = new ImmutableSet.Builder<Integer>();
    Map<String, Integer> colNameToPosInReloptHT = HiveCalciteUtil.getRowColNameIndxMap(relOptHtRowtype.getFieldList());
    List<String> colNamesInScanRowType = scanRowType.getFieldNames();
    int partColStartPosInrelOptHtRowtype = relOptHTable.getNonPartColumns().size();
    int virtualColStartPosInrelOptHtRowtype = relOptHTable.getNonPartColumns().size() + relOptHTable.getPartColumns().size();
    int tmp;
    for (int i = 0; i < colNamesInScanRowType.size(); i++) {
        tmp = colNameToPosInReloptHT.get(colNamesInScanRowType.get(i));
        neededColIndxsFrmReloptHTBldr.add(tmp);
        if (tmp >= partColStartPosInrelOptHtRowtype) {
            // Part or virtual
            virtualOrPartColIndxsInTSBldr.add(i);
            if (tmp >= virtualColStartPosInrelOptHtRowtype) {
                // Virtual
                virtualColIndxsInTSBldr.add(i);
            }
        }
    }
    return Triple.of(neededColIndxsFrmReloptHTBldr.build(), virtualOrPartColIndxsInTSBldr.build(),
            virtualColIndxsInTSBldr.build());
}
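A reduced, runnable sketch of the same indexing idea follows. It assumes the column layout described by the offsets above (non-partition columns first, then partition columns, then virtual columns) and uses made-up column names; it only illustrates how an ImmutableList.Builder and two ImmutableSet.Builders are filled in a single pass and then built.

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.util.List;
import java.util.Map;

public class ColumnIndexBuilders {

    public static void main(String[] args) {
        // hypothetical table layout: id, name (non-partition), dt (partition), ROW__ID (virtual)
        Map<String, Integer> colNameToPos = ImmutableMap.of("id", 0, "name", 1, "dt", 2, "ROW__ID", 3);
        int partColStart = 2;     // position of the first partition column
        int virtualColStart = 3;  // position of the first virtual column
        List<String> scannedCols = ImmutableList.of("name", "dt", "ROW__ID");

        ImmutableList.Builder<Integer> needed = ImmutableList.builder();
        ImmutableSet.Builder<Integer> virtualOrPart = ImmutableSet.builder();
        ImmutableSet.Builder<Integer> virtualOnly = ImmutableSet.builder();
        for (int i = 0; i < scannedCols.size(); i++) {
            int pos = colNameToPos.get(scannedCols.get(i));
            needed.add(pos);
            if (pos >= partColStart) {
                virtualOrPart.add(i);            // index within the scan row type
                if (pos >= virtualColStart) {
                    virtualOnly.add(i);
                }
            }
        }
        System.out.println(needed.build());        // [1, 2, 3]
        System.out.println(virtualOrPart.build()); // [1, 2]
        System.out.println(virtualOnly.build());   // [2]
    }
}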
Use of com.google.common.collect.ImmutableList.builder in project coprhd-controller by CoprHD.
The class ApiPrimitiveMaker, method makeInput.
/**
* Make the Input parameter fields and the input parameter array for this
* primitive
*
* @param method
* ApiMethod that is used to generate the primitive
* @param requestFields
* The fields built from the request
*
* @return the List of input fields
*/
private static Iterable<FieldSpec> makeInput(final ApiMethod method, ImmutableMap<String, FieldSpec> requestFields) {
    final ImmutableList.Builder<FieldSpec> builder = ImmutableList.<FieldSpec>builder();
    final ImmutableList.Builder<String> path_parameters = new ImmutableList.Builder<String>();
    final ImmutableList.Builder<String> query_parameters = new ImmutableList.Builder<String>();
    final ImmutableList.Builder<String> body_parameters = new ImmutableList.Builder<String>();
    final ParameterFieldName.Input name = new ParameterFieldName.Input();
    body_parameters.addAll(requestFields.keySet());
    builder.addAll(requestFields.values());
    for (ApiField pathParameter : method.pathParameters) {
        FieldSpec param = makeInputParameter(name, pathParameter, true);
        path_parameters.add(param.name);
        builder.add(param);
    }
    for (ApiField queryParameter : method.queryParameters) {
        FieldSpec param = makeInputParameter(name, queryParameter, queryParameter.required);
        query_parameters.add(param.name);
        builder.add(param);
    }
    builder.add(FieldSpec.builder(ParameterizedTypeName.get(ImmutableList.class, InputParameter.class), "PATH_INPUT")
            .addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
            .initializer("new $T().add($L).build()",
                    ParameterizedTypeName.get(ImmutableList.Builder.class, InputParameter.class),
                    Joiner.on(",").join(path_parameters.build()))
            .build());
    builder.add(FieldSpec.builder(ParameterizedTypeName.get(ImmutableList.class, InputParameter.class), "QUERY_INPUT")
            .addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
            .initializer("new $T().add($L).build()",
                    ParameterizedTypeName.get(ImmutableList.Builder.class, InputParameter.class),
                    Joiner.on(",").join(query_parameters.build()))
            .build());
    builder.add(FieldSpec.builder(ParameterizedTypeName.get(ImmutableList.class, InputParameter.class), "BODY_INPUT")
            .addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
            .initializer("new $T().add($L).build()",
                    ParameterizedTypeName.get(ImmutableList.Builder.class, InputParameter.class),
                    Joiner.on(",").join(body_parameters.build()))
            .build());
    return builder.add(FieldSpec.builder(
            ParameterizedTypeName.get(ClassName.get(Map.class), ClassName.get(String.class),
                    ParameterizedTypeName.get(List.class, InputParameter.class)), "INPUT")
            .addModifiers(Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
            .initializer("$T.of($T.INPUT_PARAMS, $L, $T.PATH_PARAMS, $L, $T.QUERY_PARAMS, $L)",
                    ImmutableMap.class, CustomServicesConstants.class, "BODY_INPUT",
                    CustomServicesConstants.class, "PATH_INPUT",
                    CustomServicesConstants.class, "QUERY_INPUT")
            .build()).build();
}
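Stripped of the JavaPoet code generation, the underlying pattern is one ImmutableList.Builder per parameter kind, built once and joined into the generated initializer string. The sketch below uses hypothetical parameter names and drops the FieldSpec machinery to show just that part.

import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;

public class ParameterBuckets {

    public static void main(String[] args) {
        ImmutableList.Builder<String> pathParameters = new ImmutableList.Builder<String>();
        ImmutableList.Builder<String> queryParameters = new ImmutableList.Builder<String>();

        pathParameters.add("PATH_PARAM_0");
        queryParameters.add("QUERY_PARAM_0").add("QUERY_PARAM_1");

        // the built lists are immutable snapshots; Joiner turns them into the
        // comma-separated argument list used by the generated initializer
        String pathArgs = Joiner.on(",").join(pathParameters.build());
        String queryArgs = Joiner.on(",").join(queryParameters.build());
        System.out.println("new Builder().add(" + pathArgs + ").build()");
        System.out.println("new Builder().add(" + queryArgs + ").build()");
    }
}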
Use of com.google.common.collect.ImmutableList.builder in project airlift by airlift.
The class Bootstrap, method initialize.
public Injector initialize() {
    checkState(!initialized, "Already initialized");
    initialized = true;
    Logging logging = null;
    if (initializeLogging) {
        logging = Logging.initialize();
    }
    Thread.currentThread().setUncaughtExceptionHandler(
            (thread, throwable) -> log.error(throwable, "Uncaught exception in thread %s", thread.getName()));
    Map<String, String> requiredProperties;
    if (requiredConfigurationProperties == null) {
        // initialize configuration
        log.info("Loading configuration");
        requiredProperties = Collections.emptyMap();
        String configFile = System.getProperty("config");
        if (configFile != null) {
            try {
                requiredProperties = loadPropertiesFrom(configFile);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }
    } else {
        requiredProperties = requiredConfigurationProperties;
    }
    Map<String, String> unusedProperties = new TreeMap<>(requiredProperties);
    // combine property sources
    Map<String, String> properties = new HashMap<>();
    if (optionalConfigurationProperties != null) {
        properties.putAll(optionalConfigurationProperties);
    }
    properties.putAll(requiredProperties);
    properties.putAll(getSystemProperties());
    // replace environment variables in property values
    List<Message> errors = new ArrayList<>();
    properties = replaceEnvironmentVariables(properties, System.getenv(), (key, error) -> {
        unusedProperties.remove(key);
        errors.add(new Message(error));
    });
    // create configuration factory
    properties = ImmutableSortedMap.copyOf(properties);
    List<Message> warnings = new ArrayList<>();
    ConfigurationFactory configurationFactory = new ConfigurationFactory(properties,
            warning -> warnings.add(new Message(warning)));
    Boolean quietConfig = configurationFactory.build(BootstrapConfig.class).getQuiet();
    // initialize logging
    if (logging != null) {
        log.info("Initializing logging");
        LoggingConfiguration configuration = configurationFactory.build(LoggingConfiguration.class);
        logging.configure(configuration);
    }
    // Register configuration classes defined in the modules
    configurationFactory.registerConfigurationClasses(modules);
    // Validate configuration classes
    errors.addAll(configurationFactory.validateRegisteredConfigurationProvider());
    // at this point all config file properties should be used,
    // so we can calculate the unused properties
    unusedProperties.keySet().removeAll(configurationFactory.getUsedProperties());
    for (String key : unusedProperties.keySet()) {
        Message message = new Message(format("Configuration property '%s' was not used", key));
        (strictConfig ? errors : warnings).add(message);
    }
    // If there are configuration errors, fail fast to keep output clean
    if (!errors.isEmpty()) {
        throw new ApplicationConfigurationException(errors, warnings);
    }
    // Log effective configuration
    if (!((quietConfig == null) ? quiet : quietConfig)) {
        logConfiguration(configurationFactory);
    }
    // Log any warnings
    if (!warnings.isEmpty()) {
        StringBuilder message = new StringBuilder();
        message.append("Configuration warnings\n");
        message.append("==========\n\n");
        message.append("Configuration should be updated:\n\n");
        for (int index = 0; index < warnings.size(); index++) {
            message.append(format("%s) %s\n", index + 1, warnings.get(index)));
        }
        message.append("\n");
        message.append("==========");
        log.warn(message.toString());
    }
    // system modules
    Builder<Module> moduleList = ImmutableList.builder();
    moduleList.add(new LifeCycleModule());
    moduleList.add(new ConfigurationModule(configurationFactory));
    moduleList.add(binder -> binder.bind(WarningsMonitor.class).toInstance(log::warn));
    // disable broken Guice "features"
    moduleList.add(Binder::disableCircularProxies);
    moduleList.add(Binder::requireExplicitBindings);
    moduleList.add(Binder::requireExactBindingAnnotations);
    moduleList.addAll(modules);
    // create the injector
    Injector injector = Guice.createInjector(Stage.PRODUCTION, moduleList.build());
    // Create the life-cycle manager
    LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class);
    // Start services
    lifeCycleManager.start();
    return injector;
}
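The module-assembly step at the end is a common ImmutableList.builder() idiom: system modules are added first, caller-supplied modules are appended, and the frozen list is passed to Guice. A minimal sketch follows, assuming Guice and Guava are on the classpath; the bound String value and the ModuleAssembly class are invented for illustration.

import com.google.common.collect.ImmutableList;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.Stage;
import java.util.List;

public class ModuleAssembly {

    static Injector initialize(List<Module> applicationModules) {
        ImmutableList.Builder<Module> moduleList = ImmutableList.builder();
        // "system" module added first, mirroring LifeCycleModule/ConfigurationModule above
        moduleList.add(binder -> binder.bind(String.class).toInstance("bootstrap"));
        // stricter binding behavior, same as the snippet's method references
        moduleList.add(Binder::disableCircularProxies);
        moduleList.add(Binder::requireExplicitBindings);
        moduleList.addAll(applicationModules);
        // build() produces the immutable module list handed to the injector
        return Guice.createInjector(Stage.PRODUCTION, moduleList.build());
    }

    public static void main(String[] args) {
        Injector injector = initialize(ImmutableList.of());
        System.out.println(injector.getInstance(String.class)); // prints "bootstrap"
    }
}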