
Example 1 with Sets

use of com.google.common.collect.Sets in project gerrit by GerritCodeReview.

the class ChangeNotesParser method updatePatchSetStates.

private void updatePatchSetStates() {
    Set<PatchSet.Id> missing = new TreeSet<>(ReviewDbUtil.intKeyOrdering());
    for (Iterator<PatchSet> it = patchSets.values().iterator(); it.hasNext(); ) {
        PatchSet ps = it.next();
        if (ps.getRevision().equals(PARTIAL_PATCH_SET)) {
            missing.add(ps.getId());
            it.remove();
        }
    }
    for (Map.Entry<PatchSet.Id, PatchSetState> e : patchSetStates.entrySet()) {
        switch(e.getValue()) {
            case PUBLISHED:
            default:
                break;
            case DELETED:
                patchSets.remove(e.getKey());
                break;
            case DRAFT:
                PatchSet ps = patchSets.get(e.getKey());
                if (ps != null) {
                    ps.setDraft(true);
                }
                break;
        }
    }
    // Post-process other collections to remove items corresponding to deleted
    // (or otherwise missing) patch sets. This is safer than trying to prevent
    // insertion, as it will also filter out items racily added after the patch
    // set was deleted.
    changeMessagesByPatchSet.keys().retainAll(patchSets.keySet());
    int pruned = pruneEntitiesForMissingPatchSets(allChangeMessages, ChangeMessage::getPatchSetId, missing);
    pruned += pruneEntitiesForMissingPatchSets(comments.values(), c -> new PatchSet.Id(id, c.key.patchSetId), missing);
    pruned += pruneEntitiesForMissingPatchSets(approvals.values(), PatchSetApproval::getPatchSetId, missing);
    if (!missing.isEmpty()) {
        log.warn("ignoring {} additional entities due to missing patch sets: {}", pruned, missing);
    }
}
Also used : PatchLineComment(com.google.gerrit.reviewdb.client.PatchLineComment) FOOTER_SUBMITTED_WITH(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_SUBMITTED_WITH) ListMultimap(com.google.common.collect.ListMultimap) MultimapBuilder(com.google.common.collect.MultimapBuilder) LoggerFactory(org.slf4j.LoggerFactory) FOOTER_HASHTAGS(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_HASHTAGS) HashBasedTable(com.google.common.collect.HashBasedTable) PatchSetApproval(com.google.gerrit.reviewdb.client.PatchSetApproval) ReviewerSet(com.google.gerrit.server.ReviewerSet) FooterKey(org.eclipse.jgit.revwalk.FooterKey) FOOTER_ASSIGNEE(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_ASSIGNEE) Comment(com.google.gerrit.reviewdb.client.Comment) InvalidObjectIdException(org.eclipse.jgit.errors.InvalidObjectIdException) FOOTER_PATCH_SET_DESCRIPTION(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_PATCH_SET_DESCRIPTION) Tables(com.google.common.collect.Tables) Locale(java.util.Locale) Map(java.util.Map) GitDateParser(org.eclipse.jgit.util.GitDateParser) ParseException(java.text.ParseException) Splitter(com.google.common.base.Splitter) NoteMap(org.eclipse.jgit.notes.NoteMap) LinkedListMultimap(com.google.common.collect.LinkedListMultimap) Timer1(com.google.gerrit.metrics.Timer1) FOOTER_WORK_IN_PROGRESS(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_WORK_IN_PROGRESS) CHANGES(com.google.gerrit.server.notedb.NoteDbTable.CHANGES) ImmutableSet(com.google.common.collect.ImmutableSet) FOOTER_SUBJECT(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_SUBJECT) ChangeMessage(com.google.gerrit.reviewdb.client.ChangeMessage) Timestamp(java.sql.Timestamp) Collection(java.util.Collection) RawParseUtils(org.eclipse.jgit.util.RawParseUtils) ChangeNotesRevWalk(com.google.gerrit.server.notedb.ChangeNotesCommit.ChangeNotesRevWalk) Set(java.util.Set) ReviewerStatusUpdate(com.google.gerrit.server.ReviewerStatusUpdate) LabelType(com.google.gerrit.common.data.LabelType) Collectors.joining(java.util.stream.Collectors.joining) Sets(com.google.common.collect.Sets) Objects(java.util.Objects) PersonIdent(org.eclipse.jgit.lib.PersonIdent) List(java.util.List) FOOTER_STATUS(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_STATUS) RefNames(com.google.gerrit.reviewdb.client.RefNames) AutoValue(com.google.auto.value.AutoValue) Optional(java.util.Optional) FOOTER_LABEL(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_LABEL) LabelId(com.google.gerrit.reviewdb.client.LabelId) FOOTER_READ_ONLY_UNTIL(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_READ_ONLY_UNTIL) FOOTER_SUBMISSION_ID(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_SUBMISSION_ID) FOOTER_COMMIT(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_COMMIT) FOOTER_CHANGE_ID(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_CHANGE_ID) ConfigInvalidException(org.eclipse.jgit.errors.ConfigInvalidException) Change(com.google.gerrit.reviewdb.client.Change) HashMap(java.util.HashMap) Function(java.util.function.Function) TreeSet(java.util.TreeSet) Enums(com.google.common.base.Enums) SubmitRecord(com.google.gerrit.common.data.SubmitRecord) ArrayList(java.util.ArrayList) FOOTER_TAG(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_TAG) HashSet(java.util.HashSet) LinkedHashMap(java.util.LinkedHashMap) FOOTER_REAL_USER(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_REAL_USER) Lists(com.google.common.collect.Lists) Charset(java.nio.charset.Charset) Account(com.google.gerrit.reviewdb.client.Account) 
FOOTER_BRANCH(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_BRANCH) FOOTER_PATCH_SET(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_PATCH_SET) FOOTER_GROUPS(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_GROUPS) FOOTER_PRIVATE(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_PRIVATE) FOOTER_TOPIC(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_TOPIC) Logger(org.slf4j.Logger) Iterator(java.util.Iterator) FOOTER_CURRENT(com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_CURRENT) ReviewDbUtil(com.google.gerrit.reviewdb.server.ReviewDbUtil) IOException(java.io.IOException) Ints(com.google.common.primitives.Ints) ObjectId(org.eclipse.jgit.lib.ObjectId) Address(com.google.gerrit.server.mail.Address) PatchSet(com.google.gerrit.reviewdb.client.PatchSet) LabelVote(com.google.gerrit.server.util.LabelVote) ReviewerByEmailSet(com.google.gerrit.server.ReviewerByEmailSet) Table(com.google.common.collect.Table) Collections(java.util.Collections) ObjectReader(org.eclipse.jgit.lib.ObjectReader) RevId(com.google.gerrit.reviewdb.client.RevId) TreeSet(java.util.TreeSet) ChangeMessage(com.google.gerrit.reviewdb.client.ChangeMessage) PatchSet(com.google.gerrit.reviewdb.client.PatchSet) LabelId(com.google.gerrit.reviewdb.client.LabelId) ObjectId(org.eclipse.jgit.lib.ObjectId) RevId(com.google.gerrit.reviewdb.client.RevId) Map(java.util.Map) NoteMap(org.eclipse.jgit.notes.NoteMap) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap)
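
The method above calls pruneEntitiesForMissingPatchSets without showing its body. A minimal sketch of what such a helper could look like, inferred from the three call sites rather than copied from Gerrit, assuming it only drops entities whose patch-set id is in the missing set and reports how many were removed (uses java.util.Collection, java.util.Iterator, java.util.Set and java.util.function.Function):

private static <T, K> int pruneEntitiesForMissingPatchSets(
        Collection<T> entities, Function<T, K> patchSetIdExtractor, Set<K> missingPatchSets) {
    // Hypothetical sketch, not the Gerrit implementation: drop every entity whose
    // patch-set id belongs to a missing (partial or deleted) patch set.
    int pruned = 0;
    for (Iterator<T> it = entities.iterator(); it.hasNext(); ) {
        if (missingPatchSets.contains(patchSetIdExtractor.apply(it.next()))) {
            it.remove();
            pruned++;
        }
    }
    return pruned;
}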

Example 2 with Sets

use of com.google.common.collect.Sets in project xtext-core by eclipse.

the class CliWizardIntegrationTest method compareFileTrees.

private void compareFileTrees(final Set<CliWizardIntegrationTest.GeneratedFile> expectedFiles, final Set<CliWizardIntegrationTest.GeneratedFile> actualFiles) {
    final Function1<CliWizardIntegrationTest.GeneratedFile, String> _function = (CliWizardIntegrationTest.GeneratedFile it) -> {
        return it.relativePath;
    };
    final Map<String, CliWizardIntegrationTest.GeneratedFile> expectedFilesByPath = IterableExtensions.<String, CliWizardIntegrationTest.GeneratedFile>toMap(expectedFiles, _function);
    final Function1<CliWizardIntegrationTest.GeneratedFile, String> _function_1 = (CliWizardIntegrationTest.GeneratedFile it) -> {
        return it.relativePath;
    };
    final Map<String, CliWizardIntegrationTest.GeneratedFile> actualFilesByPath = IterableExtensions.<String, CliWizardIntegrationTest.GeneratedFile>toMap(actualFiles, _function_1);
    final Sets.SetView<CliWizardIntegrationTest.GeneratedFile> missingFiles = Sets.<CliWizardIntegrationTest.GeneratedFile>difference(expectedFiles, actualFiles);
    final Sets.SetView<CliWizardIntegrationTest.GeneratedFile> unexpectedFiles = Sets.<CliWizardIntegrationTest.GeneratedFile>difference(actualFiles, expectedFiles);
    final Sets.SetView<CliWizardIntegrationTest.GeneratedFile> comparableFiles = Sets.<CliWizardIntegrationTest.GeneratedFile>intersection(expectedFiles, actualFiles);
    final Consumer<CliWizardIntegrationTest.GeneratedFile> _function_2 = (CliWizardIntegrationTest.GeneratedFile it) -> {
        StringConcatenation _builder = new StringConcatenation();
        _builder.append("Missing file: ");
        _builder.append(it.relativePath);
        throw new ComparisonFailure(_builder.toString(), it.content, "");
    };
    missingFiles.forEach(_function_2);
    final Consumer<CliWizardIntegrationTest.GeneratedFile> _function_3 = (CliWizardIntegrationTest.GeneratedFile it) -> {
        StringConcatenation _builder = new StringConcatenation();
        _builder.append("Unexpected file: ");
        _builder.append(it.relativePath);
        throw new ComparisonFailure(_builder.toString(), "", it.content);
    };
    unexpectedFiles.forEach(_function_3);
    final Consumer<CliWizardIntegrationTest.GeneratedFile> _function_4 = (CliWizardIntegrationTest.GeneratedFile it) -> {
        final String expectedContent = LineDelimiters.toUnix(expectedFilesByPath.get(it.relativePath).content);
        final String actualContent = LineDelimiters.toUnix(actualFilesByPath.get(it.relativePath).content);
        Assert.assertEquals(it.relativePath, expectedContent, actualContent);
    };
    comparableFiles.forEach(_function_4);
}
Also used : Sets(com.google.common.collect.Sets) ComparisonFailure(org.junit.ComparisonFailure) StringConcatenation(org.eclipse.xtend2.lib.StringConcatenation)
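
For context on the three Sets calls above: Sets.difference and Sets.intersection return unmodifiable, lazily evaluated views over the input sets rather than copies. A small self-contained sketch (the file names are made up for illustration):

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import java.util.Set;

public class SetViewDemo {
    public static void main(String[] args) {
        Set<String> expected = ImmutableSet.of("a.txt", "b.txt", "c.txt");
        Set<String> actual = ImmutableSet.of("b.txt", "c.txt", "d.txt");

        // Each SetView is an unmodifiable view backed by the inputs, so the test above
        // can iterate missing/unexpected/comparable files without copying anything.
        Sets.SetView<String> missing = Sets.difference(expected, actual);      // [a.txt]
        Sets.SetView<String> unexpected = Sets.difference(actual, expected);   // [d.txt]
        Sets.SetView<String> comparable = Sets.intersection(expected, actual); // [b.txt, c.txt]

        System.out.println(missing + " " + unexpected + " " + comparable);
    }
}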

Example 3 with Sets

use of com.google.common.collect.Sets in project incubator-pulsar by apache.

the class DispatcherBlockConsumerTest method testBrokerSubscriptionRecovery.

/**
 * <pre>
 * It verifies that cursor-recovery
 * 1. recovers individualDeletedMessages
 * 2. sets readPosition with last acked-message
 * 3. replays all unacked messages
 * </pre>
 *
 * @throws Exception
 */
@Test(dataProvider = "gracefulUnload")
public void testBrokerSubscriptionRecovery(boolean unloadBundleGracefully) throws Exception {
    log.info("-- Starting {} test --", methodName);
    final String topicName = "persistent://my-property/use/my-ns/unacked-topic";
    final String subscriberName = "subscriber-1";
    final int totalProducedMsgs = 500;
    Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName(subscriberName).subscriptionType(SubscriptionType.Shared).subscribe();
    Producer<byte[]> producer = pulsarClient.newProducer().topic("persistent://my-property/use/my-ns/unacked-topic").create();
    CountDownLatch latch = new CountDownLatch(totalProducedMsgs);
    // (1) Produced Messages
    for (int i = 0; i < totalProducedMsgs; i++) {
        String message = "my-message-" + i;
        producer.sendAsync(message.getBytes()).thenAccept(msg -> latch.countDown());
    }
    latch.await();
    // (2) consume all messages except: unackMessages-set
    Set<Integer> unackMessages = Sets.newHashSet(5, 10, 20, 21, 22, 23, 25, 26, 30, 32, 40, 80, 160, 320);
    int receivedMsgCount = 0;
    for (int i = 0; i < totalProducedMsgs; i++) {
        Message<?> msg = consumer.receive(500, TimeUnit.MILLISECONDS);
        if (!unackMessages.contains(i)) {
            consumer.acknowledge(msg);
        }
        receivedMsgCount++;
    }
    assertEquals(totalProducedMsgs, receivedMsgCount);
    consumer.close();
    // if the broker unloaded the bundle gracefully, cursor metadata is recovered from ZK; otherwise from the ledger
    if (unloadBundleGracefully) {
        // set clean namespace which will not let broker unload bundle gracefully: stop broker
        Supplier<NamespaceService> namespaceServiceSupplier = () -> spy(new NamespaceService(pulsar));
        doReturn(namespaceServiceSupplier).when(pulsar).getNamespaceServiceProvider();
    }
    stopBroker();
    // start broker which will recover topic-cursor from the ledger
    startBroker();
    consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName(subscriberName).subscriptionType(SubscriptionType.Shared).subscribe();
    // consumer should only receive unacked messages
    Set<String> unackMsgs = unackMessages.stream().map(i -> "my-message-" + i).collect(Collectors.toSet());
    Set<String> receivedMsgs = Sets.newHashSet();
    for (int i = 0; i < totalProducedMsgs; i++) {
        Message<?> msg = consumer.receive(500, TimeUnit.MILLISECONDS);
        if (msg == null) {
            break;
        }
        receivedMsgs.add(new String(msg.getData()));
    }
    receivedMsgs.removeAll(unackMsgs);
    assertTrue(receivedMsgs.isEmpty());
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ArrayListMultimap(com.google.common.collect.ArrayListMultimap) SubscriptionStats(org.apache.pulsar.common.policies.data.SubscriptionStats) Lists(org.testng.collections.Lists) PersistentTopicStats(org.apache.pulsar.common.policies.data.PersistentTopicStats) DataProvider(org.testng.annotations.DataProvider) LoggerFactory(org.slf4j.LoggerFactory) Assert.assertEquals(org.testng.Assert.assertEquals) Test(org.testng.annotations.Test) NamespaceService(org.apache.pulsar.broker.namespace.NamespaceService) Mockito.spy(org.mockito.Mockito.spy) AfterMethod(org.testng.annotations.AfterMethod) Multimap(com.google.common.collect.Multimap) Supplier(java.util.function.Supplier) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Map(java.util.Map) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) Mockito.doReturn(org.mockito.Mockito.doReturn) PersistentDispatcherMultipleConsumers(org.apache.pulsar.broker.service.persistent.PersistentDispatcherMultipleConsumers) Assert.assertNotEquals(org.testng.Assert.assertNotEquals) ConsumerImpl(org.apache.pulsar.client.impl.ConsumerImpl) Logger(org.slf4j.Logger) Iterator(java.util.Iterator) Assert.fail(org.testng.Assert.fail) BeforeMethod(org.testng.annotations.BeforeMethod) Set(java.util.Set) Assert.assertNotNull(org.testng.Assert.assertNotNull) BrokerService(org.apache.pulsar.broker.service.BrokerService) Field(java.lang.reflect.Field) Collectors(java.util.stream.Collectors) Maps(com.google.common.collect.Maps) Executors(java.util.concurrent.Executors) Sets(com.google.common.collect.Sets) TimeUnit(java.util.concurrent.TimeUnit) MessageIdImpl(org.apache.pulsar.client.impl.MessageIdImpl) CountDownLatch(java.util.concurrent.CountDownLatch) List(java.util.List) PersistentTopic(org.apache.pulsar.broker.service.persistent.PersistentTopic) ConcurrentOpenHashSet(org.apache.pulsar.common.util.collections.ConcurrentOpenHashSet) Queues(com.google.common.collect.Queues) Assert.assertTrue(org.testng.Assert.assertTrue) Queue(java.util.Queue) NamespaceService(org.apache.pulsar.broker.namespace.NamespaceService) CountDownLatch(java.util.concurrent.CountDownLatch) Test(org.testng.annotations.Test)
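
A note on the two Sets.newHashSet calls above: unlike ImmutableSet, they build plain mutable java.util.HashSet instances, which is what allows the later removeAll on receivedMsgs. A minimal sketch with made-up message ids:

import com.google.common.collect.Sets;

import java.util.Set;
import java.util.stream.Collectors;

public class NewHashSetDemo {
    public static void main(String[] args) {
        // Sets.newHashSet(...) returns a mutable HashSet populated from the varargs.
        Set<Integer> unackedIds = Sets.newHashSet(5, 10, 20, 21);

        Set<String> expectedReplayed = unackedIds.stream()
                .map(i -> "my-message-" + i)
                .collect(Collectors.toSet());

        Set<String> received = Sets.newHashSet(
                "my-message-5", "my-message-10", "my-message-20", "my-message-21");
        received.removeAll(expectedReplayed);

        // Mirrors the assertion in the test: after recovery only unacked messages are redelivered.
        System.out.println(received.isEmpty()); // true
    }
}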

Example 4 with Sets

use of com.google.common.collect.Sets in project alien4cloud by alien4cloud.

the class ArchiveRootPostProcessor method processImports.

/**
 * Process imports within the archive and compute its complete dependency set.
 * Resolve all dependency version conflicts using the following rules:
 * <ul>
 * <li>If two direct dependencies conflict with each other, use the latest version</li>
 * <li>If a transitive dependency conflicts with a direct dependency, use the direct dependency version</li>
 * <li>If two transitive dependencies conflict with each other, use the latest version.</li>
 * </ul>
 *
 * @param archiveRoot The archive to process.
 */
private void processImports(ArchiveRoot archiveRoot) {
    if (archiveRoot.getArchive().getDependencies() == null || archiveRoot.getArchive().getDependencies().isEmpty()) {
        return;
    }
    // Dependencies defined in the import section only
    // These should override transitive deps regardless of type of conflict ?
    Set<CSARDependency> dependencies = archiveRoot.getArchive().getDependencies();
    // Ensure the archive does not import itself
    Csar archive = archiveRoot.getArchive();
    if (dependencies.contains(new CSARDependency(archive.getName(), archive.getVersion(), archive.getHash()))) {
        ParsingContextExecution.getParsingErrors().add(new ParsingError(ParsingErrorLevel.ERROR, ErrorCode.CSAR_IMPORT_ITSELF, AlienUtils.prefixWith(":", archive.getVersion(), archive.getName()), null, "Import itself", null, null));
    }
    /*
         * Three types of conflicts :
         * - A transitive dep has a different version than a direct dependency => Force transitive to direct version
         * - Transitive dependencies with the same name and different version are used => Use latest
         * - Direct dependencies with the same name and different version are used => Error or use latest ?
         */
    // 1. Resolve all direct dependencies using latest version
    dependencies.removeIf(dependency -> dependencyConflictsWithLatest(dependency, dependencies));
    // Compute all distinct transitive dependencies
    final Set<CSARDependency> transitiveDependencies = new HashSet<>(dependencies.stream().map(csarDependency -> ToscaContext.get().getArchive(csarDependency.getName(), csarDependency.getVersion())).map(Csar::getDependencies).filter(c -> c != null).reduce(Sets::union).orElse(Collections.emptySet()));
    // 2. Resolve all transitive vs. direct dependencies conflicts using the direct dependency's version
    transitiveDependencies.removeIf(transitiveDependency -> dependencyConflictsWithDirect(transitiveDependency, dependencies));
    // 3. Resolve all transitive dependencies conflicts using latest version
    transitiveDependencies.removeIf(transitiveDependency -> dependencyConflictsWithLatest(transitiveDependency, transitiveDependencies));
    // Merge all dependencies (direct + transitives)
    final Set<CSARDependency> mergedDependencies = new HashSet<>(Sets.union(dependencies, transitiveDependencies));
    archiveRoot.getArchive().setDependencies(mergedDependencies);
    // Update Tosca context with the complete dependency set
    ToscaContext.get().resetDependencies(mergedDependencies);
}
Also used : DataType(org.alien4cloud.tosca.model.types.DataType) VersionUtil(alien4cloud.utils.VersionUtil) AlienUtils.safe(alien4cloud.utils.AlienUtils.safe) HashSet(java.util.HashSet) Node(org.yaml.snakeyaml.nodes.Node) ArchiveRoot(alien4cloud.tosca.model.ArchiveRoot) PropertyValue(org.alien4cloud.tosca.model.definitions.PropertyValue) AbstractPropertyValue(org.alien4cloud.tosca.model.definitions.AbstractPropertyValue) Map(java.util.Map) CSARDependency(org.alien4cloud.tosca.model.CSARDependency) RepositoryDefinition(org.alien4cloud.tosca.model.definitions.RepositoryDefinition) ToscaContext(alien4cloud.tosca.context.ToscaContext) PropertyUtil(alien4cloud.utils.PropertyUtil) PropertyDefinition(org.alien4cloud.tosca.model.definitions.PropertyDefinition) Csar(org.alien4cloud.tosca.model.Csar) MapUtils(org.apache.commons.collections.MapUtils) AlienUtils(alien4cloud.utils.AlienUtils) ParsingErrorLevel(alien4cloud.tosca.parser.ParsingErrorLevel) Resource(javax.annotation.Resource) Set(java.util.Set) ErrorCode(alien4cloud.tosca.parser.impl.ErrorCode) Sets(com.google.common.collect.Sets) Objects(java.util.Objects) Component(org.springframework.stereotype.Component) ParsingContextExecution(alien4cloud.tosca.parser.ParsingContextExecution) NormativeCredentialConstant(org.alien4cloud.tosca.normative.constants.NormativeCredentialConstant) Collections(java.util.Collections) ParsingError(alien4cloud.tosca.parser.ParsingError) Csar(org.alien4cloud.tosca.model.Csar) ParsingError(alien4cloud.tosca.parser.ParsingError) Sets(com.google.common.collect.Sets) CSARDependency(org.alien4cloud.tosca.model.CSARDependency) HashSet(java.util.HashSet)
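
The reduce(Sets::union) and the defensive new HashSet<>(Sets.union(...)) above both rely on the fact that Sets.union returns an unmodifiable SetView. A small sketch with hypothetical dependency names:

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class UnionReduceDemo {
    public static void main(String[] args) {
        // Hypothetical per-archive dependency sets (the names are illustrative only).
        List<Set<String>> perArchiveDeps = List.of(
                ImmutableSet.of("normative-types:1.0", "common:2.0"),
                ImmutableSet.of("common:2.0", "monitoring:1.1"));

        // Sets::union works as a reduce accumulator because SetView is itself a Set,
        // but the result is unmodifiable, hence the copy into a HashSet before removeIf.
        Set<String> transitive = new HashSet<>(
                perArchiveDeps.stream().reduce(Sets::union).orElse(ImmutableSet.of()));

        transitive.removeIf(dep -> dep.startsWith("common:"));
        System.out.println(transitive); // the two non-"common" deps (iteration order not guaranteed)
    }
}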

Example 5 with Sets

use of com.google.common.collect.Sets in project presto by prestodb.

the class QueryPlanner method aggregate.

private PlanBuilder aggregate(PlanBuilder subPlan, QuerySpecification node) {
    if (!analysis.isAggregation(node)) {
        return subPlan;
    }
    // 1. Pre-project all scalar inputs (arguments and non-trivial group by expressions)
    Set<Expression> groupByExpressions = ImmutableSet.copyOf(analysis.getGroupByExpressions(node));
    ImmutableList.Builder<Expression> arguments = ImmutableList.builder();
    analysis.getAggregates(node).stream().map(FunctionCall::getArguments).flatMap(List::stream).filter(// lambda expression is generated at execution time
    exp -> !(exp instanceof LambdaExpression)).forEach(arguments::add);
    analysis.getAggregates(node).stream().map(FunctionCall::getOrderBy).filter(Optional::isPresent).map(Optional::get).map(OrderBy::getSortItems).flatMap(List::stream).map(SortItem::getSortKey).forEach(arguments::add);
    // filter expressions need to be projected first
    analysis.getAggregates(node).stream().map(FunctionCall::getFilter).filter(Optional::isPresent).map(Optional::get).forEach(arguments::add);
    Iterable<Expression> inputs = Iterables.concat(groupByExpressions, arguments.build());
    subPlan = handleSubqueries(subPlan, node, inputs);
    if (!Iterables.isEmpty(inputs)) {
        // avoid an empty projection if the only aggregation is COUNT (which has no arguments)
        subPlan = project(subPlan, inputs);
    }
    // 2. Aggregate
    // 2.a. Rewrite aggregate arguments
    TranslationMap argumentTranslations = new TranslationMap(subPlan.getRelationPlan(), analysis, lambdaDeclarationToVariableMap);
    ImmutableList.Builder<VariableReferenceExpression> aggregationArgumentsBuilder = ImmutableList.builder();
    for (Expression argument : arguments.build()) {
        VariableReferenceExpression variable = subPlan.translate(argument);
        argumentTranslations.put(argument, variable);
        aggregationArgumentsBuilder.add(variable);
    }
    List<VariableReferenceExpression> aggregationArguments = aggregationArgumentsBuilder.build();
    // 2.b. Rewrite grouping columns
    TranslationMap groupingTranslations = new TranslationMap(subPlan.getRelationPlan(), analysis, lambdaDeclarationToVariableMap);
    Map<VariableReferenceExpression, VariableReferenceExpression> groupingSetMappings = new LinkedHashMap<>();
    for (Expression expression : groupByExpressions) {
        VariableReferenceExpression input = subPlan.translate(expression);
        VariableReferenceExpression output = variableAllocator.newVariable(expression, analysis.getTypeWithCoercions(expression), "gid");
        groupingTranslations.put(expression, output);
        groupingSetMappings.put(output, input);
    }
    // This tracks the grouping sets before complex expressions are considered (see comments below)
    // It's also used to compute the descriptors needed to implement grouping()
    List<Set<FieldId>> columnOnlyGroupingSets = ImmutableList.of(ImmutableSet.of());
    List<List<VariableReferenceExpression>> groupingSets = ImmutableList.of(ImmutableList.of());
    if (node.getGroupBy().isPresent()) {
        // For the purpose of "distinct", we need to canonicalize column references that may have varying
        // syntactic forms (e.g., "t.a" vs "a"). Thus we need to enumerate grouping sets based on the underlying
        // fieldId associated with each column reference expression.
        // The catch is that simple group-by expressions can be arbitrary expressions (this is a departure from the SQL specification).
        // But, they don't affect the number of grouping sets or the behavior of "distinct". We can compute all the candidate
        // grouping sets in terms of fieldId, dedup as appropriate and then cross-join them with the complex expressions.
        Analysis.GroupingSetAnalysis groupingSetAnalysis = analysis.getGroupingSets(node);
        columnOnlyGroupingSets = enumerateGroupingSets(groupingSetAnalysis);
        if (node.getGroupBy().get().isDistinct()) {
            columnOnlyGroupingSets = columnOnlyGroupingSets.stream().distinct().collect(toImmutableList());
        }
        // add in the complex expressions and then materialize the grouping sets in terms of plan columns
        ImmutableList.Builder<List<VariableReferenceExpression>> groupingSetBuilder = ImmutableList.builder();
        for (Set<FieldId> groupingSet : columnOnlyGroupingSets) {
            ImmutableList.Builder<VariableReferenceExpression> columns = ImmutableList.builder();
            groupingSetAnalysis.getComplexExpressions().stream().map(groupingTranslations::get).forEach(columns::add);
            groupingSet.stream().map(field -> groupingTranslations.get(new FieldReference(field.getFieldIndex()))).forEach(columns::add);
            groupingSetBuilder.add(columns.build());
        }
        groupingSets = groupingSetBuilder.build();
    }
    // 2.c. Generate GroupIdNode (multiple grouping sets) or ProjectNode (single grouping set)
    Optional<VariableReferenceExpression> groupIdVariable = Optional.empty();
    if (groupingSets.size() > 1) {
        groupIdVariable = Optional.of(variableAllocator.newVariable("groupId", BIGINT));
        GroupIdNode groupId = new GroupIdNode(subPlan.getRoot().getSourceLocation(), idAllocator.getNextId(), subPlan.getRoot(), groupingSets, groupingSetMappings, aggregationArguments, groupIdVariable.get());
        subPlan = new PlanBuilder(groupingTranslations, groupId);
    } else {
        Assignments.Builder assignments = Assignments.builder();
        aggregationArguments.stream().map(AssignmentUtils::identityAsSymbolReference).forEach(assignments::put);
        groupingSetMappings.forEach((key, value) -> assignments.put(key, castToRowExpression(asSymbolReference(value))));
        ProjectNode project = new ProjectNode(subPlan.getRoot().getSourceLocation(), idAllocator.getNextId(), subPlan.getRoot(), assignments.build(), LOCAL);
        subPlan = new PlanBuilder(groupingTranslations, project);
    }
    TranslationMap aggregationTranslations = new TranslationMap(subPlan.getRelationPlan(), analysis, lambdaDeclarationToVariableMap);
    aggregationTranslations.copyMappingsFrom(groupingTranslations);
    // 2.d. Rewrite aggregates
    ImmutableMap.Builder<VariableReferenceExpression, Aggregation> aggregationsBuilder = ImmutableMap.builder();
    boolean needPostProjectionCoercion = false;
    for (FunctionCall aggregate : analysis.getAggregates(node)) {
        Expression rewritten = argumentTranslations.rewrite(aggregate);
        VariableReferenceExpression newVariable = variableAllocator.newVariable(rewritten, analysis.getType(aggregate));
        // Therefore we can end up with this implicit cast, and have to move it into a post-projection
        if (rewritten instanceof Cast) {
            rewritten = ((Cast) rewritten).getExpression();
            needPostProjectionCoercion = true;
        }
        aggregationTranslations.put(aggregate, newVariable);
        FunctionCall rewrittenFunction = (FunctionCall) rewritten;
        aggregationsBuilder.put(newVariable, new Aggregation(new CallExpression(getSourceLocation(rewrittenFunction), aggregate.getName().getSuffix(), analysis.getFunctionHandle(aggregate), analysis.getType(aggregate), rewrittenFunction.getArguments().stream().map(OriginalExpressionUtils::castToRowExpression).collect(toImmutableList())), rewrittenFunction.getFilter().map(OriginalExpressionUtils::castToRowExpression), rewrittenFunction.getOrderBy().map(orderBy -> toOrderingScheme(orderBy, variableAllocator.getTypes())), rewrittenFunction.isDistinct(), Optional.empty()));
    }
    Map<VariableReferenceExpression, Aggregation> aggregations = aggregationsBuilder.build();
    ImmutableSet.Builder<Integer> globalGroupingSets = ImmutableSet.builder();
    for (int i = 0; i < groupingSets.size(); i++) {
        if (groupingSets.get(i).isEmpty()) {
            globalGroupingSets.add(i);
        }
    }
    ImmutableList.Builder<VariableReferenceExpression> groupingKeys = ImmutableList.builder();
    groupingSets.stream().flatMap(List::stream).distinct().forEach(groupingKeys::add);
    groupIdVariable.ifPresent(groupingKeys::add);
    AggregationNode aggregationNode = new AggregationNode(subPlan.getRoot().getSourceLocation(), idAllocator.getNextId(), subPlan.getRoot(), aggregations, groupingSets(groupingKeys.build(), groupingSets.size(), globalGroupingSets.build()), ImmutableList.of(), AggregationNode.Step.SINGLE, Optional.empty(), groupIdVariable);
    subPlan = new PlanBuilder(aggregationTranslations, aggregationNode);
    // TODO: this is a hack, we should change type coercions to coerce the inputs to functions/operators instead of coercing the output
    if (needPostProjectionCoercion) {
        ImmutableList.Builder<Expression> alreadyCoerced = ImmutableList.builder();
        alreadyCoerced.addAll(groupByExpressions);
        groupIdVariable.map(ExpressionTreeUtils::createSymbolReference).ifPresent(alreadyCoerced::add);
        subPlan = explicitCoercionFields(subPlan, alreadyCoerced.build(), analysis.getAggregates(node));
    }
    // 4. Project and re-write all grouping functions
    return handleGroupingOperations(subPlan, node, groupIdVariable, columnOnlyGroupingSets);
}
Also used : FINAL(com.facebook.presto.spi.plan.LimitNode.Step.FINAL) AggregationNode(com.facebook.presto.spi.plan.AggregationNode) SortNode(com.facebook.presto.sql.planner.plan.SortNode) OriginalExpressionUtils(com.facebook.presto.sql.relational.OriginalExpressionUtils) FrameBound(com.facebook.presto.sql.tree.FrameBound) VariableReferenceExpression(com.facebook.presto.spi.relation.VariableReferenceExpression) Field(com.facebook.presto.sql.analyzer.Field) WindowNodeUtil.toBoundType(com.facebook.presto.sql.planner.optimizations.WindowNodeUtil.toBoundType) ValuesNode(com.facebook.presto.spi.plan.ValuesNode) Delete(com.facebook.presto.sql.tree.Delete) Map(java.util.Map) LOCAL(com.facebook.presto.spi.plan.ProjectNode.Locality.LOCAL) AggregationNode.singleGroupingSet(com.facebook.presto.spi.plan.AggregationNode.singleGroupingSet) CallExpression(com.facebook.presto.spi.relation.CallExpression) OrderingScheme(com.facebook.presto.spi.plan.OrderingScheme) FunctionCall(com.facebook.presto.sql.tree.FunctionCall) OffsetNode(com.facebook.presto.sql.planner.plan.OffsetNode) SymbolReference(com.facebook.presto.sql.tree.SymbolReference) AssignmentUtils.identitiesAsSymbolReferences(com.facebook.presto.sql.planner.plan.AssignmentUtils.identitiesAsSymbolReferences) RelationId(com.facebook.presto.sql.analyzer.RelationId) ImmutableSet(com.google.common.collect.ImmutableSet) Query(com.facebook.presto.sql.tree.Query) WindowNodeUtil.toWindowType(com.facebook.presto.sql.planner.optimizations.WindowNodeUtil.toWindowType) SortOrder(com.facebook.presto.common.block.SortOrder) QuerySpecification(com.facebook.presto.sql.tree.QuerySpecification) ImmutableMap(com.google.common.collect.ImmutableMap) LambdaExpression(com.facebook.presto.sql.tree.LambdaExpression) Ordering(com.facebook.presto.spi.plan.Ordering) ExpressionTreeUtils(com.facebook.presto.sql.analyzer.ExpressionTreeUtils) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) Node(com.facebook.presto.sql.tree.Node) Set(java.util.Set) SortItem(com.facebook.presto.sql.tree.SortItem) Sets(com.google.common.collect.Sets) LimitNode(com.facebook.presto.spi.plan.LimitNode) SystemSessionProperties.isSkipRedundantSort(com.facebook.presto.SystemSessionProperties.isSkipRedundantSort) List(java.util.List) Window(com.facebook.presto.sql.tree.Window) ProjectNode(com.facebook.presto.spi.plan.ProjectNode) ExpressionTreeUtils.getSourceLocation(com.facebook.presto.sql.analyzer.ExpressionTreeUtils.getSourceLocation) FieldId(com.facebook.presto.sql.analyzer.FieldId) Analysis(com.facebook.presto.sql.analyzer.Analysis) Optional(java.util.Optional) MoreObjects.firstNonNull(com.google.common.base.MoreObjects.firstNonNull) PlannerUtils.toOrderingScheme(com.facebook.presto.sql.planner.PlannerUtils.toOrderingScheme) IntStream(java.util.stream.IntStream) Iterables(com.google.common.collect.Iterables) LambdaArgumentDeclaration(com.facebook.presto.sql.tree.LambdaArgumentDeclaration) PlannerUtils.toSortOrder(com.facebook.presto.sql.planner.PlannerUtils.toSortOrder) GroupIdNode(com.facebook.presto.sql.planner.plan.GroupIdNode) Assignments(com.facebook.presto.spi.plan.Assignments) Expressions.call(com.facebook.presto.sql.relational.Expressions.call) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) WindowFrame(com.facebook.presto.sql.tree.WindowFrame) FilterNode(com.facebook.presto.spi.plan.FilterNode) AssignmentUtils(com.facebook.presto.sql.planner.plan.AssignmentUtils) ImmutableList(com.google.common.collect.ImmutableList) 
Objects.requireNonNull(java.util.Objects.requireNonNull) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) TableHandle(com.facebook.presto.spi.TableHandle) Cast(com.facebook.presto.sql.tree.Cast) Type(com.facebook.presto.common.type.Type) RowExpression(com.facebook.presto.spi.relation.RowExpression) BIGINT(com.facebook.presto.common.type.BigintType.BIGINT) GroupingOperation(com.facebook.presto.sql.tree.GroupingOperation) OrderBy(com.facebook.presto.sql.tree.OrderBy) PlanNodeIdAllocator(com.facebook.presto.spi.plan.PlanNodeIdAllocator) WindowNode(com.facebook.presto.sql.planner.plan.WindowNode) Session(com.facebook.presto.Session) NodeLocation(com.facebook.presto.sql.tree.NodeLocation) NodeUtils.getSortItemsFromOrderBy(com.facebook.presto.sql.NodeUtils.getSortItemsFromOrderBy) RelationType(com.facebook.presto.sql.analyzer.RelationType) Offset(com.facebook.presto.sql.tree.Offset) VARBINARY(com.facebook.presto.common.type.VarbinaryType.VARBINARY) TupleDomain(com.facebook.presto.common.predicate.TupleDomain) DeleteNode(com.facebook.presto.sql.planner.plan.DeleteNode) NodeRef(com.facebook.presto.sql.tree.NodeRef) Streams.stream(com.google.common.collect.Streams.stream) Scope(com.facebook.presto.sql.analyzer.Scope) PlanNode(com.facebook.presto.spi.plan.PlanNode) AggregationNode.groupingSets(com.facebook.presto.spi.plan.AggregationNode.groupingSets) Expression(com.facebook.presto.sql.tree.Expression) ColumnHandle(com.facebook.presto.spi.ColumnHandle) TableScanNode(com.facebook.presto.spi.plan.TableScanNode) FieldReference(com.facebook.presto.sql.tree.FieldReference) Aggregation(com.facebook.presto.spi.plan.AggregationNode.Aggregation) OriginalExpressionUtils.asSymbolReference(com.facebook.presto.sql.relational.OriginalExpressionUtils.asSymbolReference) Metadata(com.facebook.presto.metadata.Metadata) OriginalExpressionUtils.castToRowExpression(com.facebook.presto.sql.relational.OriginalExpressionUtils.castToRowExpression) Assignments(com.facebook.presto.spi.plan.Assignments) LinkedHashMap(java.util.LinkedHashMap) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) List(java.util.List) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) Optional(java.util.Optional) ImmutableMap(com.google.common.collect.ImmutableMap) VariableReferenceExpression(com.facebook.presto.spi.relation.VariableReferenceExpression) Analysis(com.facebook.presto.sql.analyzer.Analysis) OriginalExpressionUtils(com.facebook.presto.sql.relational.OriginalExpressionUtils) LambdaExpression(com.facebook.presto.sql.tree.LambdaExpression) Cast(com.facebook.presto.sql.tree.Cast) AggregationNode.singleGroupingSet(com.facebook.presto.spi.plan.AggregationNode.singleGroupingSet) ImmutableSet(com.google.common.collect.ImmutableSet) Set(java.util.Set) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) ImmutableList(com.google.common.collect.ImmutableList) Aggregation(com.facebook.presto.spi.plan.AggregationNode.Aggregation) GroupIdNode(com.facebook.presto.sql.planner.plan.GroupIdNode) FunctionCall(com.facebook.presto.sql.tree.FunctionCall) CallExpression(com.facebook.presto.spi.relation.CallExpression) FieldReference(com.facebook.presto.sql.tree.FieldReference) 
AggregationNode(com.facebook.presto.spi.plan.AggregationNode) VariableReferenceExpression(com.facebook.presto.spi.relation.VariableReferenceExpression) CallExpression(com.facebook.presto.spi.relation.CallExpression) LambdaExpression(com.facebook.presto.sql.tree.LambdaExpression) RowExpression(com.facebook.presto.spi.relation.RowExpression) Expression(com.facebook.presto.sql.tree.Expression) OriginalExpressionUtils.castToRowExpression(com.facebook.presto.sql.relational.OriginalExpressionUtils.castToRowExpression) FieldId(com.facebook.presto.sql.analyzer.FieldId) ProjectNode(com.facebook.presto.spi.plan.ProjectNode)
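
A toy illustration (not Presto code) of the comment above the distinct() call: because column-only grouping sets are expressed as sets of field ids rather than syntactic column references, syntactically different but equivalent GROUP BY clauses collapse under distinct():

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;

import java.util.List;
import java.util.Set;

public class GroupingSetDedupDemo {
    public static void main(String[] args) {
        // Integers stand in for FieldId: each grouping set is a set of field ordinals,
        // so "GROUP BY t.a, t.b" and "GROUP BY b, a" produce equal sets.
        List<Set<Integer>> columnOnlyGroupingSets = ImmutableList.of(
                ImmutableSet.of(0, 1),   // t.a, t.b
                ImmutableSet.of(1, 0),   // b, a (same fields, different syntax and order)
                ImmutableSet.of());      // global grouping set

        List<Set<Integer>> deduped = columnOnlyGroupingSets.stream()
                .distinct()
                .collect(ImmutableList.toImmutableList());

        System.out.println(deduped.size()); // 2: the duplicate field-level set collapses
    }
}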

Aggregations

Sets (com.google.common.collect.Sets)19 Set (java.util.Set)16 Map (java.util.Map)15 List (java.util.List)12 ArrayList (java.util.ArrayList)11 HashMap (java.util.HashMap)11 Collectors (java.util.stream.Collectors)11 HashSet (java.util.HashSet)10 IOException (java.io.IOException)9 Optional (java.util.Optional)9 Collections (java.util.Collections)8 Logger (org.slf4j.Logger)8 LoggerFactory (org.slf4j.LoggerFactory)8 Lists (com.google.common.collect.Lists)6 TimeUnit (java.util.concurrent.TimeUnit)6 ImmutableSet (com.google.common.collect.ImmutableSet)5 InputStream (java.io.InputStream)5 Collection (java.util.Collection)5 ImmutableMap (com.google.common.collect.ImmutableMap)4 Maps (com.google.common.collect.Maps)4