
Example 31 with Stream

use of java.util.stream.Stream in project spring-framework by spring-projects.

the class StreamConverterTests method convertFromListToRawStream.

@Test
@SuppressWarnings("resource")
public void convertFromListToRawStream() throws NoSuchFieldException {
    List<String> stream = Arrays.asList("1", "2", "3");
    TypeDescriptor streamOfInteger = new TypeDescriptor(Types.class.getField("rawStream"));
    Object result = this.conversionService.convert(stream, streamOfInteger);
    assertNotNull("Converted object must not be null", result);
    assertTrue("Converted object must be a stream", result instanceof Stream);
    @SuppressWarnings("unchecked") Stream<Object> content = (Stream<Object>) result;
    StringBuilder sb = new StringBuilder();
    content.forEach(sb::append);
    assertEquals("123", sb.toString());
}
Also used : TypeDescriptor(org.springframework.core.convert.TypeDescriptor) Stream(java.util.stream.Stream) Test(org.junit.Test)
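
For context, here is a minimal, self-contained sketch of the same list-to-raw-Stream conversion outside the test fixture. It assumes a plain DefaultConversionService, which ships with a collection-to-Stream converter registered; the class name and the printed output are illustrative only and are not part of the Spring test.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;

import org.springframework.core.convert.TypeDescriptor;
import org.springframework.core.convert.support.DefaultConversionService;

public class RawStreamConversionSketch {
    public static void main(String[] args) {
        // Assumed: DefaultConversionService has the Stream converter among its default converters.
        DefaultConversionService conversionService = new DefaultConversionService();
        List<String> source = Arrays.asList("1", "2", "3");

        TypeDescriptor sourceType = TypeDescriptor.collection(List.class, TypeDescriptor.valueOf(String.class));
        TypeDescriptor targetType = TypeDescriptor.valueOf(Stream.class); // raw Stream, no element type

        Object result = conversionService.convert(source, sourceType, targetType);
        // The converter wraps the list elements in a Stream, so this prints 123,
        // matching the assertEquals("123", ...) in the test above.
        ((Stream<?>) result).forEach(System.out::print);
    }
}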

Example 32 with Stream

use of java.util.stream.Stream in project cassandra by apache.

the class PartitionImplementationTest method testIter.

private void testIter(Supplier<Collection<? extends Unfiltered>> contentSupplier, Row staticRow) {
    NavigableSet<Clusterable> sortedContent = new TreeSet<Clusterable>(metadata.comparator);
    sortedContent.addAll(contentSupplier.get());
    AbstractBTreePartition partition;
    try (UnfilteredRowIterator iter = new Util.UnfilteredSource(metadata, Util.dk("pk"), staticRow, sortedContent.stream().map(x -> (Unfiltered) x).iterator())) {
        partition = ImmutableBTreePartition.create(iter);
    }
    ColumnMetadata defCol = metadata.getColumn(new ColumnIdentifier("col", true));
    ColumnFilter cf = ColumnFilter.selectionBuilder().add(defCol).build();
    Function<? super Clusterable, ? extends Clusterable> colFilter = x -> x instanceof Row ? ((Row) x).filter(cf, metadata) : x;
    Slices slices = Slices.with(metadata.comparator, Slice.make(clustering(KEY_RANGE / 4), clustering(KEY_RANGE * 3 / 4)));
    Slices multiSlices = makeSlices();
    // lastRow
    assertRowsEqual((Row) get(sortedContent.descendingSet(), x -> x instanceof Row), partition.lastRow());
    // get(static)
    assertRowsEqual(staticRow, partition.getRow(Clustering.STATIC_CLUSTERING));
    // get
    for (int i = 0; i < KEY_RANGE; ++i) {
        Clustering cl = clustering(i);
        assertRowsEqual(getRow(sortedContent, cl), partition.getRow(cl));
    }
    // isEmpty
    assertEquals(sortedContent.isEmpty() && staticRow == null, partition.isEmpty());
    // hasRows
    assertEquals(sortedContent.stream().anyMatch(x -> x instanceof Row), partition.hasRows());
    // iterator
    assertIteratorsEqual(sortedContent.stream().filter(x -> x instanceof Row).iterator(), partition.iterator());
    // unfiltered iterator
    assertIteratorsEqual(sortedContent.iterator(), partition.unfilteredIterator());
    // unfiltered iterator
    assertIteratorsEqual(sortedContent.iterator(), partition.unfilteredIterator(ColumnFilter.all(metadata), Slices.ALL, false));
    // column-filtered
    assertIteratorsEqual(sortedContent.stream().map(colFilter).iterator(), partition.unfilteredIterator(cf, Slices.ALL, false));
    // sliced
    assertIteratorsEqual(slice(sortedContent, slices.get(0)), partition.unfilteredIterator(ColumnFilter.all(metadata), slices, false));
    assertIteratorsEqual(streamOf(slice(sortedContent, slices.get(0))).map(colFilter).iterator(), partition.unfilteredIterator(cf, slices, false));
    // randomly multi-sliced
    assertIteratorsEqual(slice(sortedContent, multiSlices), partition.unfilteredIterator(ColumnFilter.all(metadata), multiSlices, false));
    assertIteratorsEqual(streamOf(slice(sortedContent, multiSlices)).map(colFilter).iterator(), partition.unfilteredIterator(cf, multiSlices, false));
    // reversed
    assertIteratorsEqual(sortedContent.descendingIterator(), partition.unfilteredIterator(ColumnFilter.all(metadata), Slices.ALL, true));
    assertIteratorsEqual(sortedContent.descendingSet().stream().map(colFilter).iterator(), partition.unfilteredIterator(cf, Slices.ALL, true));
    assertIteratorsEqual(invert(slice(sortedContent, slices.get(0))), partition.unfilteredIterator(ColumnFilter.all(metadata), slices, true));
    assertIteratorsEqual(streamOf(invert(slice(sortedContent, slices.get(0)))).map(colFilter).iterator(), partition.unfilteredIterator(cf, slices, true));
    assertIteratorsEqual(invert(slice(sortedContent, multiSlices)), partition.unfilteredIterator(ColumnFilter.all(metadata), multiSlices, true));
    assertIteratorsEqual(streamOf(invert(slice(sortedContent, multiSlices))).map(colFilter).iterator(), partition.unfilteredIterator(cf, multiSlices, true));
    // search iterator
    testSearchIterator(sortedContent, partition, ColumnFilter.all(metadata), false);
    testSearchIterator(sortedContent, partition, cf, false);
    testSearchIterator(sortedContent, partition, ColumnFilter.all(metadata), true);
    testSearchIterator(sortedContent, partition, cf, true);
    // sliceable iter
    testSlicingOfIterators(sortedContent, partition, ColumnFilter.all(metadata), false);
    testSlicingOfIterators(sortedContent, partition, cf, false);
    testSlicingOfIterators(sortedContent, partition, ColumnFilter.all(metadata), true);
    testSlicingOfIterators(sortedContent, partition, cf, true);
}
Also used : AbstractBTreePartition(org.apache.cassandra.db.partitions.AbstractBTreePartition) java.util(java.util) Iterables(com.google.common.collect.Iterables) ColumnMetadata(org.apache.cassandra.schema.ColumnMetadata) BeforeClass(org.junit.BeforeClass) SearchIterator(org.apache.cassandra.utils.SearchIterator) org.apache.cassandra.db(org.apache.cassandra.db) Deletion(org.apache.cassandra.db.rows.Row.Deletion) Function(java.util.function.Function) Supplier(java.util.function.Supplier) Iterators(com.google.common.collect.Iterators) org.apache.cassandra.db.rows(org.apache.cassandra.db.rows) Partition(org.apache.cassandra.db.partitions.Partition) ConfigurationException(org.apache.cassandra.exceptions.ConfigurationException) ImmutableBTreePartition(org.apache.cassandra.db.partitions.ImmutableBTreePartition) StreamSupport(java.util.stream.StreamSupport) ColumnFilter(org.apache.cassandra.db.filter.ColumnFilter) Predicate(java.util.function.Predicate) Util(org.apache.cassandra.Util) ByteBufferUtil(org.apache.cassandra.utils.ByteBufferUtil) KeyspaceParams(org.apache.cassandra.schema.KeyspaceParams) Test(org.junit.Test) Collectors(java.util.stream.Collectors) SchemaLoader(org.apache.cassandra.SchemaLoader) Stream(java.util.stream.Stream) ColumnIdentifier(org.apache.cassandra.cql3.ColumnIdentifier) TableMetadata(org.apache.cassandra.schema.TableMetadata) Assert(org.junit.Assert) AsciiType(org.apache.cassandra.db.marshal.AsciiType)
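
The assertions above repeatedly turn the sorted reference content into iterators via streams. A generic, illustrative sketch of that Stream-to-Iterator bridging follows; the strings are made-up stand-ins for Cassandra's Unfiltered/Row objects, not real test data.

import java.util.Iterator;
import java.util.NavigableSet;
import java.util.TreeSet;

public class StreamToIteratorSketch {
    public static void main(String[] args) {
        // Build the expected content as a sorted set, as the test does with its comparator.
        NavigableSet<String> sortedContent = new TreeSet<>();
        sortedContent.add("row-1");
        sortedContent.add("marker-a");
        sortedContent.add("row-2");

        // Stream.iterator() converts a filtered/mapped Stream back into the Iterator shape
        // that assertIteratorsEqual-style comparisons expect.
        Iterator<String> onlyRows = sortedContent.stream()
                .filter(x -> x.startsWith("row"))
                .iterator();
        onlyRows.forEachRemaining(System.out::println); // row-1, row-2
    }
}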

Example 33 with Stream

use of java.util.stream.Stream in project keywhiz by square.

the class ClientResource method modifyClientGroups.

/**
   * Modify groups a client has membership in
   *
   * @excludeParams automationClient
   * @param name Client name
   * @param request JSON request specifying which groups to add or remove
   * @return Listing of groups client has membership in
   *
   * @responseMessage 201 Client modified successfully
   * @responseMessage 404 Client not found
   */
@Timed
@ExceptionMetered
@PUT
@Path("{name}/groups")
@Produces(APPLICATION_JSON)
public Iterable<String> modifyClientGroups(@Auth AutomationClient automationClient, @PathParam("name") String name, @Valid ModifyGroupsRequestV2 request) {
    Client client = clientDAOReadWrite.getClient(name).orElseThrow(NotFoundException::new);
    String user = automationClient.getName();
    long clientId = client.getId();
    Set<String> oldGroups = aclDAOReadWrite.getGroupsFor(client).stream().map(Group::getName).collect(toSet());
    Set<String> groupsToAdd = Sets.difference(request.addGroups(), oldGroups);
    Set<String> groupsToRemove = Sets.intersection(request.removeGroups(), oldGroups);
    // TODO: should optimize AclDAO to use names and return only name column
    groupsToGroupIds(groupsToAdd).forEach((maybeGroupId) -> maybeGroupId.ifPresent((groupId) -> aclDAOReadWrite.findAndEnrollClient(clientId, groupId, auditLog, user, new HashMap<>())));
    groupsToGroupIds(groupsToRemove).forEach((maybeGroupId) -> maybeGroupId.ifPresent((groupId) -> aclDAOReadWrite.findAndEvictClient(clientId, groupId, auditLog, user, new HashMap<>())));
    return aclDAOReadWrite.getGroupsFor(client).stream().map(Group::getName).collect(toSet());
}
Also used : NotImplementedException(org.apache.commons.lang3.NotImplementedException) PathParam(javax.ws.rs.PathParam) AclDAO(keywhiz.service.daos.AclDAO) Produces(javax.ws.rs.Produces) ClientDAO(keywhiz.service.daos.ClientDAO) GET(javax.ws.rs.GET) ClientDetailResponseV2(keywhiz.api.automation.v2.ClientDetailResponseV2) Event(keywhiz.log.Event) Path(javax.ws.rs.Path) LoggerFactory(org.slf4j.LoggerFactory) Auth(io.dropwizard.auth.Auth) GroupDAOFactory(keywhiz.service.daos.GroupDAO.GroupDAOFactory) HashMap(java.util.HashMap) Inject(javax.inject.Inject) Valid(javax.validation.Valid) AutomationClient(keywhiz.api.model.AutomationClient) ClientDAOFactory(keywhiz.service.daos.ClientDAO.ClientDAOFactory) Consumes(javax.ws.rs.Consumes) ExceptionMetered(com.codahale.metrics.annotation.ExceptionMetered) ModifyGroupsRequestV2(keywhiz.api.automation.v2.ModifyGroupsRequestV2) UriBuilder(javax.ws.rs.core.UriBuilder) URI(java.net.URI) Client(keywhiz.api.model.Client) APPLICATION_JSON(javax.ws.rs.core.MediaType.APPLICATION_JSON) GroupDAO(keywhiz.service.daos.GroupDAO) Collectors.toSet(java.util.stream.Collectors.toSet) DELETE(javax.ws.rs.DELETE) CreateClientRequestV2(keywhiz.api.automation.v2.CreateClientRequestV2) AuditLog(keywhiz.log.AuditLog) ModifyClientRequestV2(keywhiz.api.automation.v2.ModifyClientRequestV2) Group(keywhiz.api.model.Group) POST(javax.ws.rs.POST) Logger(org.slf4j.Logger) Set(java.util.Set) AclDAOFactory(keywhiz.service.daos.AclDAO.AclDAOFactory) ConflictException(keywhiz.service.exceptions.ConflictException) Instant(java.time.Instant) Sets(com.google.common.collect.Sets) NotFoundException(javax.ws.rs.NotFoundException) String.format(java.lang.String.format) Timed(com.codahale.metrics.annotation.Timed) EventTag(keywhiz.log.EventTag) Stream(java.util.stream.Stream) Response(javax.ws.rs.core.Response) Optional(java.util.Optional) SanitizedSecret(keywhiz.api.model.SanitizedSecret) PUT(javax.ws.rs.PUT)
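
The membership change above is computed with Guava set algebra over a Set built by collect(toSet()). A small, self-contained sketch of that diff pattern follows; the group names are invented for illustration and are not Keywhiz data.

import java.util.Set;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

public class GroupDiffSketch {
    public static void main(String[] args) {
        // Current memberships, e.g. as collected from getGroupsFor(client).stream()...collect(toSet()).
        Set<String> oldGroups = ImmutableSet.of("admins", "readers");

        Set<String> requestedAdds = ImmutableSet.of("readers", "writers");
        Set<String> requestedRemoves = ImmutableSet.of("admins", "auditors");

        // Only enroll groups the client is not already in, and only evict groups it actually has.
        Set<String> groupsToAdd = Sets.difference(requestedAdds, oldGroups);         // [writers]
        Set<String> groupsToRemove = Sets.intersection(requestedRemoves, oldGroups); // [admins]

        System.out.println(groupsToAdd + " / " + groupsToRemove);
    }
}

This keeps the enroll/evict calls idempotent with respect to the client's existing memberships, which is the point of the difference/intersection pair in the resource method.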

Example 34 with Stream

use of java.util.stream.Stream in project CoreNLP by stanfordnlp.

the class JSONOutputter method print.

/** {@inheritDoc} */
// It's lying; we need the "redundant" casts (as of 2014-09-08)
@SuppressWarnings("RedundantCast")
@Override
public void print(Annotation doc, OutputStream target, Options options) throws IOException {
    PrintWriter writer = new PrintWriter(IOUtils.encodedOutputStreamWriter(target, options.encoding));
    JSONWriter l0 = new JSONWriter(writer, options);
    l0.object(l1 -> {
        l1.set("docId", doc.get(CoreAnnotations.DocIDAnnotation.class));
        l1.set("docDate", doc.get(CoreAnnotations.DocDateAnnotation.class));
        l1.set("docSourceType", doc.get(CoreAnnotations.DocSourceTypeAnnotation.class));
        l1.set("docType", doc.get(CoreAnnotations.DocTypeAnnotation.class));
        l1.set("author", doc.get(CoreAnnotations.AuthorAnnotation.class));
        l1.set("location", doc.get(CoreAnnotations.LocationAnnotation.class));
        if (options.includeText) {
            l1.set("text", doc.get(CoreAnnotations.TextAnnotation.class));
        }
        if (doc.get(CoreAnnotations.SentencesAnnotation.class) != null) {
            l1.set("sentences", doc.get(CoreAnnotations.SentencesAnnotation.class).stream().map(sentence -> (Consumer<Writer>) (Writer l2) -> {
                l2.set("id", sentence.get(CoreAnnotations.SentenceIDAnnotation.class));
                l2.set("index", sentence.get(CoreAnnotations.SentenceIndexAnnotation.class));
                l2.set("line", sentence.get(CoreAnnotations.LineNumberAnnotation.class));
                StringWriter treeStrWriter = new StringWriter();
                TreePrint treePrinter = options.constituentTreePrinter;
                if (treePrinter == AnnotationOutputter.DEFAULT_CONSTITUENT_TREE_PRINTER) {
                    treePrinter = new TreePrint("oneline");
                }
                treePrinter.printTree(sentence.get(TreeCoreAnnotations.TreeAnnotation.class), new PrintWriter(treeStrWriter, true));
                String treeStr = treeStrWriter.toString().trim();
                if (!"SENTENCE_SKIPPED_OR_UNPARSABLE".equals(treeStr)) {
                    l2.set("parse", treeStr);
                }
                l2.set("basicDependencies", buildDependencyTree(sentence.get(SemanticGraphCoreAnnotations.BasicDependenciesAnnotation.class)));
                l2.set("enhancedDependencies", buildDependencyTree(sentence.get(SemanticGraphCoreAnnotations.EnhancedDependenciesAnnotation.class)));
                l2.set("enhancedPlusPlusDependencies", buildDependencyTree(sentence.get(SemanticGraphCoreAnnotations.EnhancedPlusPlusDependenciesAnnotation.class)));
                Tree sentimentTree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
                if (sentimentTree != null) {
                    int sentiment = RNNCoreAnnotations.getPredictedClass(sentimentTree);
                    String sentimentClass = sentence.get(SentimentCoreAnnotations.SentimentClass.class);
                    l2.set("sentimentValue", Integer.toString(sentiment));
                    l2.set("sentiment", sentimentClass.replaceAll(" ", ""));
                }
                Collection<RelationTriple> openIETriples = sentence.get(NaturalLogicAnnotations.RelationTriplesAnnotation.class);
                if (openIETriples != null) {
                    l2.set("openie", openIETriples.stream().map(triple -> (Consumer<Writer>) (Writer tripleWriter) -> {
                        tripleWriter.set("subject", triple.subjectGloss());
                        tripleWriter.set("subjectSpan", Span.fromPair(triple.subjectTokenSpan()));
                        tripleWriter.set("relation", triple.relationGloss());
                        tripleWriter.set("relationSpan", Span.fromPair(triple.relationTokenSpan()));
                        tripleWriter.set("object", triple.objectGloss());
                        tripleWriter.set("objectSpan", Span.fromPair(triple.objectTokenSpan()));
                    }));
                }
                Collection<RelationTriple> kbpTriples = sentence.get(CoreAnnotations.KBPTriplesAnnotation.class);
                if (kbpTriples != null) {
                    l2.set("kbp", kbpTriples.stream().map(triple -> (Consumer<Writer>) (Writer tripleWriter) -> {
                        tripleWriter.set("subject", triple.subjectGloss());
                        tripleWriter.set("subjectSpan", Span.fromPair(triple.subjectTokenSpan()));
                        tripleWriter.set("relation", triple.relationGloss());
                        tripleWriter.set("relationSpan", Span.fromPair(triple.relationTokenSpan()));
                        tripleWriter.set("object", triple.objectGloss());
                        tripleWriter.set("objectSpan", Span.fromPair(triple.objectTokenSpan()));
                    }));
                }
                if (sentence.get(CoreAnnotations.MentionsAnnotation.class) != null) {
                    Integer sentTokenBegin = sentence.get(CoreAnnotations.TokenBeginAnnotation.class);
                    l2.set("entitymentions", sentence.get(CoreAnnotations.MentionsAnnotation.class).stream().map(m -> (Consumer<Writer>) (Writer l3) -> {
                        Integer tokenBegin = m.get(CoreAnnotations.TokenBeginAnnotation.class);
                        Integer tokenEnd = m.get(CoreAnnotations.TokenEndAnnotation.class);
                        l3.set("docTokenBegin", tokenBegin);
                        l3.set("docTokenEnd", tokenEnd);
                        if (tokenBegin != null && sentTokenBegin != null) {
                            l3.set("tokenBegin", tokenBegin - sentTokenBegin);
                        }
                        if (tokenEnd != null && sentTokenBegin != null) {
                            l3.set("tokenEnd", tokenEnd - sentTokenBegin);
                        }
                        l3.set("text", m.get(CoreAnnotations.TextAnnotation.class));
                        l3.set("characterOffsetBegin", m.get(CoreAnnotations.CharacterOffsetBeginAnnotation.class));
                        l3.set("characterOffsetEnd", m.get(CoreAnnotations.CharacterOffsetEndAnnotation.class));
                        l3.set("ner", m.get(CoreAnnotations.NamedEntityTagAnnotation.class));
                        l3.set("normalizedNER", m.get(CoreAnnotations.NormalizedNamedEntityTagAnnotation.class));
                        l3.set("entitylink", m.get(CoreAnnotations.WikipediaEntityAnnotation.class));
                        Timex time = m.get(TimeAnnotations.TimexAnnotation.class);
                        if (time != null) {
                            Timex.Range range = time.range();
                            l3.set("timex", (Consumer<Writer>) l4 -> {
                                l4.set("tid", time.tid());
                                l4.set("type", time.timexType());
                                l4.set("value", time.value());
                                l4.set("altValue", time.altVal());
                                l4.set("range", (range != null) ? (Consumer<Writer>) l5 -> {
                                    l5.set("begin", range.begin);
                                    l5.set("end", range.end);
                                    l5.set("duration", range.duration);
                                } : null);
                            });
                        }
                    }));
                }
                if (sentence.get(CoreAnnotations.TokensAnnotation.class) != null) {
                    l2.set("tokens", sentence.get(CoreAnnotations.TokensAnnotation.class).stream().map(token -> (Consumer<Writer>) (Writer l3) -> {
                        l3.set("index", token.index());
                        l3.set("word", token.word());
                        l3.set("originalText", token.originalText());
                        l3.set("lemma", token.lemma());
                        l3.set("characterOffsetBegin", token.beginPosition());
                        l3.set("characterOffsetEnd", token.endPosition());
                        l3.set("pos", token.tag());
                        l3.set("ner", token.ner());
                        l3.set("normalizedNER", token.get(CoreAnnotations.NormalizedNamedEntityTagAnnotation.class));
                        l3.set("speaker", token.get(CoreAnnotations.SpeakerAnnotation.class));
                        l3.set("truecase", token.get(CoreAnnotations.TrueCaseAnnotation.class));
                        l3.set("truecaseText", token.get(CoreAnnotations.TrueCaseTextAnnotation.class));
                        l3.set("before", token.get(CoreAnnotations.BeforeAnnotation.class));
                        l3.set("after", token.get(CoreAnnotations.AfterAnnotation.class));
                        l3.set("entitylink", token.get(CoreAnnotations.WikipediaEntityAnnotation.class));
                        Timex time = token.get(TimeAnnotations.TimexAnnotation.class);
                        if (time != null) {
                            Timex.Range range = time.range();
                            l3.set("timex", (Consumer<Writer>) l4 -> {
                                l4.set("tid", time.tid());
                                l4.set("type", time.timexType());
                                l4.set("value", time.value());
                                l4.set("altValue", time.altVal());
                                l4.set("range", (range != null) ? (Consumer<Writer>) l5 -> {
                                    l5.set("begin", range.begin);
                                    l5.set("end", range.end);
                                    l5.set("duration", range.duration);
                                } : null);
                            });
                        }
                    }));
                }
            }));
        }
        if (doc.get(CorefCoreAnnotations.CorefChainAnnotation.class) != null) {
            Map<Integer, CorefChain> corefChains = doc.get(CorefCoreAnnotations.CorefChainAnnotation.class);
            if (corefChains != null) {
                l1.set("corefs", (Consumer<Writer>) chainWriter -> {
                    for (CorefChain chain : corefChains.values()) {
                        CorefChain.CorefMention representative = chain.getRepresentativeMention();
                        chainWriter.set(Integer.toString(chain.getChainID()), chain.getMentionsInTextualOrder().stream().map(mention -> (Consumer<Writer>) (Writer mentionWriter) -> {
                            mentionWriter.set("id", mention.mentionID);
                            mentionWriter.set("text", mention.mentionSpan);
                            mentionWriter.set("type", mention.mentionType);
                            mentionWriter.set("number", mention.number);
                            mentionWriter.set("gender", mention.gender);
                            mentionWriter.set("animacy", mention.animacy);
                            mentionWriter.set("startIndex", mention.startIndex);
                            mentionWriter.set("endIndex", mention.endIndex);
                            mentionWriter.set("headIndex", mention.headIndex);
                            mentionWriter.set("sentNum", mention.sentNum);
                            mentionWriter.set("position", Arrays.stream(mention.position.elems()).boxed().collect(Collectors.toList()));
                            mentionWriter.set("isRepresentativeMention", mention == representative);
                        }));
                    }
                });
            }
        }
        if (doc.get(CoreAnnotations.QuotationsAnnotation.class) != null) {
            List<CoreMap> quotes = QuoteAnnotator.gatherQuotes(doc);
            l1.set("quotes", quotes.stream().map(quote -> (Consumer<Writer>) (Writer l2) -> {
                l2.set("id", quote.get(CoreAnnotations.QuotationIndexAnnotation.class));
                l2.set("text", quote.get(CoreAnnotations.TextAnnotation.class));
                l2.set("beginIndex", quote.get(CoreAnnotations.CharacterOffsetBeginAnnotation.class));
                l2.set("endIndex", quote.get(CoreAnnotations.CharacterOffsetEndAnnotation.class));
                l2.set("beginToken", quote.get(CoreAnnotations.TokenBeginAnnotation.class));
                l2.set("endToken", quote.get(CoreAnnotations.TokenEndAnnotation.class));
                l2.set("beginSentence", quote.get(CoreAnnotations.SentenceBeginAnnotation.class));
                l2.set("endSentence", quote.get(CoreAnnotations.SentenceEndAnnotation.class));
            }));
        }
    });
    // flush
    l0.writer.flush();
}
Also used : java.util(java.util) CorefChain(edu.stanford.nlp.coref.data.CorefChain) SentenceUtils(edu.stanford.nlp.ling.SentenceUtils) Tree(edu.stanford.nlp.trees.Tree) NaturalLogicAnnotations(edu.stanford.nlp.naturalli.NaturalLogicAnnotations) TimeAnnotations(edu.stanford.nlp.time.TimeAnnotations) SemanticGraphCoreAnnotations(edu.stanford.nlp.semgraph.SemanticGraphCoreAnnotations) RNNCoreAnnotations(edu.stanford.nlp.neural.rnn.RNNCoreAnnotations) RelationTriple(edu.stanford.nlp.ie.util.RelationTriple) SemanticGraph(edu.stanford.nlp.semgraph.SemanticGraph) Pair(edu.stanford.nlp.util.Pair) CoreMap(edu.stanford.nlp.util.CoreMap) Timex(edu.stanford.nlp.time.Timex) IndexedWord(edu.stanford.nlp.ling.IndexedWord) TreeCoreAnnotations(edu.stanford.nlp.trees.TreeCoreAnnotations) TreePrint(edu.stanford.nlp.trees.TreePrint) CoreAnnotations(edu.stanford.nlp.ling.CoreAnnotations) SemanticGraphEdge(edu.stanford.nlp.semgraph.SemanticGraphEdge) IOUtils(edu.stanford.nlp.io.IOUtils) Pointer(edu.stanford.nlp.util.Pointer) SentimentCoreAnnotations(edu.stanford.nlp.sentiment.SentimentCoreAnnotations) DecimalFormat(java.text.DecimalFormat) StringOutputStream(edu.stanford.nlp.io.StringOutputStream) Collectors(java.util.stream.Collectors) Span(edu.stanford.nlp.ie.machinereading.structure.Span) Consumer(java.util.function.Consumer) Stream(java.util.stream.Stream) java.io(java.io) Generics(edu.stanford.nlp.util.Generics) CorefCoreAnnotations(edu.stanford.nlp.coref.CorefCoreAnnotations)
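
The recurring pattern in this method is mapping annotations to a Stream of Consumer callbacks, so each element writes its own nested JSON object lazily. A stripped-down sketch of that pattern follows; SimpleWriter is a hypothetical stand-in for JSONOutputter.Writer, not the real CoreNLP type.

import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;
import java.util.stream.Stream;

public class NestedWriterSketch {
    // Hypothetical stand-in for a key/value JSON writer.
    interface SimpleWriter {
        void set(String key, Object value);
    }

    public static void main(String[] args) {
        List<String> tokens = Arrays.asList("Hello", "world");

        // Same shape as l2.set("tokens", ...stream().map(token -> (Consumer<Writer>) ...)):
        // each element becomes a callback that fills in its own object when invoked.
        Stream<Consumer<SimpleWriter>> tokenWriters = tokens.stream()
                .map(word -> (Consumer<SimpleWriter>) w -> {
                    w.set("word", word);
                    w.set("length", word.length());
                });

        // A trivial writer that just prints the pairs it receives.
        tokenWriters.forEach(writerFn ->
                writerFn.accept((key, value) -> System.out.println(key + "=" + value)));
    }
}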

Example 35 with Stream

use of java.util.stream.Stream in project torodb by torodb.

the class TopologyCoordinator method lookForSyncSource.

/**
   * Looks for an optimal sync source to replicate from.
   *
   * On the first attempt, we ignore nodes with a slave delay higher than our own, hidden nodes,
   * and nodes that are excessively lagged. The second attempt includes such nodes, in case they
   * are the only ones we can reach. This loop attempts to set 'closestIndex'.
   *
   * @param now              the current time
   * @param lastOpAppliedOp  the last OpTime this node has applied
   * @param onlyOptimal      if true, slaves with more delay than ourselves, hidden nodes, and
   *                         excessively lagged nodes are ignored
   * @param oldestSyncOpTime the oldest optime considered not excessively lagged. Only used if
   *                         onlyOptimal is true.
   * @return the new optimal sync source, which is not {@link Optional#isPresent() present} if none
   *         can be chosen
   */
private Optional<MemberConfig> lookForSyncSource(Instant now, Optional<OpTime> lastOpAppliedOp, boolean onlyOptimal, OpTime oldestSyncOpTime) {
    OpTime lastOpApplied = lastOpAppliedOp.orElse(OpTime.EPOCH);
    Stream<MemberHeartbeatData> hbCandidateStream = _hbdata.stream().filter(MemberHeartbeatData::isUp).filter(hbData -> hbData.getState().isReadable()).filter(hbData -> hbData.getOpTime().isAfter(lastOpApplied));
    if (onlyOptimal) {
        hbCandidateStream = hbCandidateStream.filter(hbData -> hbData.getOpTime().isEqualOrAfter(oldestSyncOpTime));
    }
    Stream<MemberConfig> mcCandidateStream = hbCandidateStream.map(this::getMemberConfig).filter(mc -> !isBlacklistedMember(mc, now));
    if (onlyOptimal) {
        mcCandidateStream = mcCandidateStream.filter(mc -> !mc.isHidden()).filter(mc -> mc.getSlaveDelay() < slaveDelaySecs);
    }
    //If there are several candidates, the one with the lowest ping is returned
    return mcCandidateStream.reduce((MemberConfig cand1, MemberConfig cand2) -> {
        long ping1 = getPing(cand1.getHostAndPort());
        long ping2 = getPing(cand2.getHostAndPort());
        if (ping1 < ping2) {
            return cand1;
        }
        return cand2;
    });
}
Also used : MemberHeartbeatData(com.torodb.mongodb.commands.pojos.MemberHeartbeatData) MemberState(com.torodb.mongodb.commands.pojos.MemberState) OpTime(com.eightkdata.mongowp.OpTime) Nonnegative(javax.annotation.Nonnegative) UnauthorizedException(com.eightkdata.mongowp.exceptions.UnauthorizedException) InvalidOptionsException(com.eightkdata.mongowp.exceptions.InvalidOptionsException) MemberConfig(com.torodb.mongodb.commands.pojos.MemberConfig) ReplSetHeartbeatArgument(com.torodb.mongodb.commands.signatures.internal.ReplSetHeartbeatCommand.ReplSetHeartbeatArgument) HashMap(java.util.HashMap) ReplSetHeartbeatReply(com.torodb.mongodb.commands.signatures.internal.ReplSetHeartbeatReply) ShutdownInProgressException(com.eightkdata.mongowp.exceptions.ShutdownInProgressException) ReplicaSetConfig(com.torodb.mongodb.commands.pojos.ReplicaSetConfig) OptionalInt(java.util.OptionalInt) ArrayList(java.util.ArrayList) ReplSetProtocolVersion(com.torodb.mongodb.commands.pojos.ReplSetProtocolVersion) ReplSetSyncFromReply(com.torodb.mongodb.commands.signatures.repl.ReplSetSyncFromCommand.ReplSetSyncFromReply) UnsignedInteger(com.google.common.primitives.UnsignedInteger) Duration(java.time.Duration) Map(java.util.Map) RemoteCommandResponse(com.eightkdata.mongowp.client.core.MongoConnection.RemoteCommandResponse) MongoException(com.eightkdata.mongowp.exceptions.MongoException) ErrorCode(com.eightkdata.mongowp.ErrorCode) Nonnull(javax.annotation.Nonnull) WeakHashMap(java.util.WeakHashMap) Nullable(javax.annotation.Nullable) NodeNotFoundException(com.eightkdata.mongowp.exceptions.NodeNotFoundException) Set(java.util.Set) Health(com.torodb.mongodb.commands.pojos.MemberHeartbeatData.Health) Instant(java.time.Instant) HostAndPort(com.google.common.net.HostAndPort) List(java.util.List) Logger(org.apache.logging.log4j.Logger) Stream(java.util.stream.Stream) Entry(java.util.Map.Entry) Optional(java.util.Optional) Preconditions(com.google.common.base.Preconditions) LogManager(org.apache.logging.log4j.LogManager) Collections(java.util.Collections) HostUnreachableException(com.eightkdata.mongowp.exceptions.HostUnreachableException) NotThreadSafe(javax.annotation.concurrent.NotThreadSafe)
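
The final reduce keeps whichever candidate has the lower ping. An equivalent, illustrative formulation with Stream.min is sketched below; PingEntry and its values are hypothetical stand-ins for MemberConfig plus getPing(), not torodb code.

import java.util.Comparator;
import java.util.Optional;
import java.util.stream.Stream;

public class LowestPingSketch {
    // Hypothetical stand-in for a member config paired with its measured ping.
    static final class PingEntry {
        final String host;
        final long pingMillis;
        PingEntry(String host, long pingMillis) {
            this.host = host;
            this.pingMillis = pingMillis;
        }
    }

    public static void main(String[] args) {
        Stream<PingEntry> candidates = Stream.of(
                new PingEntry("node-a", 40), new PingEntry("node-b", 12), new PingEntry("node-c", 25));

        // min(...) expresses the same "keep the lower-ping candidate" logic as the reduce(...) above.
        Optional<PingEntry> best = candidates.min(Comparator.comparingLong(e -> e.pingMillis));
        best.ifPresent(e -> System.out.println(e.host)); // node-b
    }
}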

Aggregations

Stream (java.util.stream.Stream): 161
Collectors (java.util.stream.Collectors): 98
List (java.util.List): 89
ArrayList (java.util.ArrayList): 66
Map (java.util.Map): 66
Set (java.util.Set): 59
IOException (java.io.IOException): 58
Optional (java.util.Optional): 45
Collections (java.util.Collections): 43
HashMap (java.util.HashMap): 43
Arrays (java.util.Arrays): 33
HashSet (java.util.HashSet): 33
File (java.io.File): 32
Path (java.nio.file.Path): 32
Function (java.util.function.Function): 28
Logger (org.slf4j.Logger): 26
LoggerFactory (org.slf4j.LoggerFactory): 26
java.util (java.util): 25
Predicate (java.util.function.Predicate): 23
Objects (java.util.Objects): 22