
Example 26 with IntFunction

Use of java.util.function.IntFunction in project lucene-solr by apache.

The class SimpleTextDocValuesReader, method getNumericNonIterator:

IntFunction<Long> getNumericNonIterator(FieldInfo fieldInfo) throws IOException {
    final OneField field = fields.get(fieldInfo.name);
    assert field != null;
    // SegmentCoreReaders already verifies this field is valid:
    assert field != null : "field=" + fieldInfo.name + " fields=" + fields;
    final IndexInput in = data.clone();
    final BytesRefBuilder scratch = new BytesRefBuilder();
    final DecimalFormat decoder = new DecimalFormat(field.pattern, new DecimalFormatSymbols(Locale.ROOT));
    decoder.setParseBigDecimal(true);
    return new IntFunction<Long>() {

        @Override
        public Long apply(int docID) {
            try {
                //System.out.println(Thread.currentThread().getName() + ": get docID=" + docID + " in=" + in);
                if (docID < 0 || docID >= maxDoc) {
                    throw new IndexOutOfBoundsException("docID must be 0 .. " + (maxDoc - 1) + "; got " + docID);
                }
                in.seek(field.dataStartFilePointer + (1 + field.pattern.length() + 2) * docID);
                SimpleTextUtil.readLine(in, scratch);
                //System.out.println("parsing delta: " + scratch.utf8ToString());
                BigDecimal bd;
                try {
                    bd = (BigDecimal) decoder.parse(scratch.get().utf8ToString());
                } catch (ParseException pe) {
                    throw new CorruptIndexException("failed to parse BigDecimal value", in, pe);
                }
                // read the line telling us if it's real or not
                SimpleTextUtil.readLine(in, scratch);
                return BigInteger.valueOf(field.minValue).add(bd.toBigIntegerExact()).longValue();
            } catch (IOException ioe) {
                throw new RuntimeException(ioe);
            }
        }
    };
}
Also used : BytesRefBuilder(org.apache.lucene.util.BytesRefBuilder) DecimalFormatSymbols(java.text.DecimalFormatSymbols) IntFunction(java.util.function.IntFunction) DecimalFormat(java.text.DecimalFormat) ChecksumIndexInput(org.apache.lucene.store.ChecksumIndexInput) BufferedChecksumIndexInput(org.apache.lucene.store.BufferedChecksumIndexInput) IndexInput(org.apache.lucene.store.IndexInput) ParseException(java.text.ParseException) IOException(java.io.IOException) BigDecimal(java.math.BigDecimal)
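
For orientation, the following is a minimal, self-contained sketch of the pattern this example demonstrates: build the lookup state once, then return an IntFunction<Long> that resolves one value per docID on demand. The array-backed DocValueLookupSketch below is made up for illustration and only mirrors the shape; the real reader seeks into the SimpleText data file as shown above.

import java.util.function.IntFunction;

public class DocValueLookupSketch {
    public static void main(String[] args) {
        // Hypothetical stand-in for the per-document values the reader would decode from disk.
        long[] storedValues = {42L, 7L, 1001L};
        int maxDoc = storedValues.length;

        // Same shape as getNumericNonIterator(): capture the backing state once,
        // then hand back an IntFunction<Long> that resolves one value per docID.
        IntFunction<Long> values = docID -> {
            if (docID < 0 || docID >= maxDoc) {
                throw new IndexOutOfBoundsException("docID must be 0 .. " + (maxDoc - 1) + "; got " + docID);
            }
            return storedValues[docID];
        };

        System.out.println(values.apply(1)); // prints 7
    }
}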

Example 27 with IntFunction

Use of java.util.function.IntFunction in project narchy by automenta.

The class QuestionTest, method questionDrivesInference:

// @Test public void testQuestionHandler() throws Narsese.NarseseException {
// NAR nar = NARS.shell();
// 
// final int[] s = {0};
// new TaskMatch("add(%1, %2, #x)", nar) {
// 
// @Override public boolean test(@NotNull Task task) { return task.isQuestOrQuestion(); }
// 
// @Override
// protected void accept(Task task, Map<Term, Term> xy) {
// System.out.println(task + " " + xy);
// s[0] = xy.size();
// }
// };
// 
// nar.ask($.$("add(1, 2, #x)"));
// 
// assertEquals(3, s[0]);
// 
// }
// @Test public void testOperationHandler() throws Narsese.NarseseException {
// NAR nar = NARS.shell();
// 
// final int[] s = {0};
// StringBuilder match = new StringBuilder();
// new OperationTaskMatch( $.$("add(%1, %2, #x)"), nar) {
// 
// @Override public boolean test(@NotNull Task task) { return task.isQuestOrQuestion(); }
// 
// @Override
// protected void onMatch(Term[] args) {
// match.append(Arrays.toString(args)).append(' ');
// }
// };
// 
// nar.ask($.$("add(1, 2, #x)"));
// 
// assertTrue(match.toString().contains("[1, 2, #1026]"));
// 
// nar.ask($.$("add(1, #x)"));
// nar.ask($.$("(#x --> add)"));
// 
// assertFalse(match.toString().contains("[1, #1026]"));
// }
/**
 * Tests whether the use of a question guides inference, as measured by the speed of reaching a specific conclusion.
 */
@Test
public void questionDrivesInference() {
    final int[] dims = { 3, 2 };
    final int timelimit = 2400;
    TaskStatistics withTasks = new TaskStatistics();
    TaskStatistics withoutTasks = new TaskStatistics();
    DoubleSummaryStatistics withTime = new DoubleSummaryStatistics();
    DoubleSummaryStatistics withOutTime = new DoubleSummaryStatistics();
    IntFunction<NAR> narProvider = (seed) -> {
        NAR d = NARS.tmp(1);
        d.random().setSeed(seed);
        d.termVolumeMax.set(16);
        d.freqResolution.set(0.1f);
        return d;
    };
    BiFunction<Integer, Integer, TestNAR> testProvider = (seed, variation) -> {
        NAR n = narProvider.apply(seed);
        TestNAR t = new TestNAR(n);
        switch(variation) {
            case 0:
                new DeductiveMeshTest(t, dims, timelimit);
                break;
            case 1:
                new DeductiveMeshTest(t, dims, timelimit) {

                    @Override
                    public void ask(@NotNull TestNAR n, Term term) {
                    // disabled
                    }
                };
                break;
        }
        return t;
    };
    for (int i = 0; i < 10; i++) {
        int seed = i + 1;
        TestNAR withQuestion = testProvider.apply(seed, 0);
        withQuestion.test(true);
        withTime.accept(withQuestion.time());
        withTasks.add(withQuestion.nar);
        TestNAR withoutQuestion = testProvider.apply(seed, 1);
        withoutQuestion.test(true);
        withOutTime.accept(withoutQuestion.time());
        withoutTasks.add(withoutQuestion.nar);
    }
    withTasks.print();
    withoutTasks.print();
    // note: DoubleSummaryStatistics does not override equals(), so this only asserts that the two statistics objects are distinct instances
    assertNotEquals(withTime, withOutTime);
    System.out.println("with: " + withTime);
    System.out.println("withOut: " + withOutTime);
// assertTrue(withTime.getSum() < withOutTime.getSum());
// assertTrue(withTime.getSum() < 2 * withOutTime.getSum()); //less than half, considering that a search "diameter" becomes a "radius" by providing the answer end-point
}
Also used : nars(nars) BiFunction(java.util.function.BiFunction) DeductiveMeshTest(nars.test.DeductiveMeshTest) TestNAR(nars.test.TestNAR) Assertions.assertNotEquals(org.junit.jupiter.api.Assertions.assertNotEquals) Disabled(org.junit.jupiter.api.Disabled) DoubleSummaryStatistics(java.util.DoubleSummaryStatistics) ETERNAL(nars.time.Tense.ETERNAL) Test(org.junit.jupiter.api.Test) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Assertions.assertTrue(org.junit.jupiter.api.Assertions.assertTrue) TaskStatistics(nars.task.util.TaskStatistics) NotNull(org.jetbrains.annotations.NotNull) Term(nars.term.Term) IntFunction(java.util.function.IntFunction)
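
The IntFunction usage to note above is narProvider: the int argument is a random seed, and the function acts as a factory that builds a freshly configured NAR for that seed, so each loop iteration is reproducible. Below is a minimal sketch of the same shape, with java.util.Random standing in for the project-specific NAR (SeededFactorySketch is a made-up name).

import java.util.Random;
import java.util.function.IntFunction;

public class SeededFactorySketch {
    public static void main(String[] args) {
        // An int-keyed factory: the argument is a seed, the result is a fresh, deterministic instance.
        IntFunction<Random> rngProvider = seed -> new Random(seed);

        for (int i = 0; i < 3; i++) {
            int seed = i + 1;
            // Re-running with the same seed always prints the same number.
            System.out.println("seed " + seed + " -> " + rngProvider.apply(seed).nextInt(100));
        }
    }
}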

Example 28 with IntFunction

Use of java.util.function.IntFunction in project kafka-streams-examples by confluentinc.

The class KafkaMusicExampleTest, method shouldCreateChartsAndAccessThemViaInteractiveQueries:

@Test
public void shouldCreateChartsAndAccessThemViaInteractiveQueries() throws Exception {
    final Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    final Map<String, String> serdeConfig = Collections.singletonMap(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, CLUSTER.schemaRegistryUrl());
    final SpecificAvroSerializer<PlayEvent> playEventSerializer = new SpecificAvroSerializer<>();
    playEventSerializer.configure(serdeConfig, false);
    final SpecificAvroSerializer<Song> songSerializer = new SpecificAvroSerializer<>();
    songSerializer.configure(serdeConfig, false);
    final KafkaProducer<String, PlayEvent> playEventProducer = new KafkaProducer<>(props, Serdes.String().serializer(), playEventSerializer);
    final KafkaProducer<Long, Song> songProducer = new KafkaProducer<>(props, new LongSerializer(), songSerializer);
    final List<Song> songs = Arrays.asList(
            new Song(1L, "Fresh Fruit For Rotting Vegetables", "Dead Kennedys", "Chemical Warfare", "Punk"),
            new Song(2L, "We Are the League", "Anti-Nowhere League", "Animal", "Punk"),
            new Song(3L, "Live In A Dive", "Subhumans", "All Gone Dead", "Punk"),
            new Song(4L, "PSI", "Wheres The Pope?", "Fear Of God", "Punk"),
            new Song(5L, "Totally Exploited", "The Exploited", "Punks Not Dead", "Punk"),
            new Song(6L, "The Audacity Of Hype", "Jello Biafra And The Guantanamo School Of Medicine", "Three Strikes", "Punk"),
            new Song(7L, "Licensed to Ill", "The Beastie Boys", "Fight For Your Right", "Hip Hop"),
            new Song(8L, "De La Soul Is Dead", "De La Soul", "Oodles Of O's", "Hip Hop"),
            new Song(9L, "Straight Outta Compton", "N.W.A", "Gangsta Gangsta", "Hip Hop"),
            new Song(10L, "Fear Of A Black Planet", "Public Enemy", "911 Is A Joke", "Hip Hop"),
            new Song(11L, "Curtain Call - The Hits", "Eminem", "Fack", "Hip Hop"),
            new Song(12L, "The Calling", "Hilltop Hoods", "The Calling", "Hip Hop"));
    songs.forEach(song -> songProducer.send(new ProducerRecord<Long, Song>(KafkaMusicExample.SONG_FEED, song.getId(), song)));
    songProducer.flush();
    songProducer.close();
    // create the play events we can use for charting
    sendPlayEvents(6, songs.get(0), playEventProducer);
    sendPlayEvents(5, songs.get(1), playEventProducer);
    sendPlayEvents(4, songs.get(2), playEventProducer);
    sendPlayEvents(3, songs.get(3), playEventProducer);
    sendPlayEvents(2, songs.get(4), playEventProducer);
    sendPlayEvents(1, songs.get(5), playEventProducer);
    sendPlayEvents(6, songs.get(6), playEventProducer);
    sendPlayEvents(5, songs.get(7), playEventProducer);
    sendPlayEvents(4, songs.get(8), playEventProducer);
    sendPlayEvents(3, songs.get(9), playEventProducer);
    sendPlayEvents(2, songs.get(10), playEventProducer);
    sendPlayEvents(1, songs.get(11), playEventProducer);
    playEventProducer.close();
    streams.start();
    // wait until the StreamsMetadata is available as this indicates that
    // KafkaStreams initialization has occurred
    TestUtils.waitForCondition(() -> !StreamsMetadata.NOT_AVAILABLE.equals(streams.allMetadataForStore(KafkaMusicExample.TOP_FIVE_SONGS_STORE)), MAX_WAIT_MS, "StreamsMetadata should be available");
    final String baseUrl = "http://localhost:" + appServerPort + "/kafka-music";
    final Client client = ClientBuilder.newClient();
    // Wait until the all-songs state store has some data in it
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Long, Song> songsStore;
        try {
            songsStore = streams.store(KafkaMusicExample.ALL_SONGS, QueryableStoreTypes.<Long, Song>keyValueStore());
            return songsStore.all().hasNext();
        } catch (Exception e) {
            return false;
        }
    }, MAX_WAIT_MS, KafkaMusicExample.ALL_SONGS + " should be non-empty");
    final IntFunction<SongPlayCountBean> intFunction = index -> {
        final Song song = songs.get(index);
        return songCountPlayBean(song, 6L - (index % 6));
    };
    // Verify that the charts are as expected
    verifyChart(baseUrl + "/charts/genre/punk", client, IntStream.range(0, 5).mapToObj(intFunction).collect(Collectors.toList()));
    verifyChart(baseUrl + "/charts/genre/hip hop", client, IntStream.range(6, 11).mapToObj(intFunction).collect(Collectors.toList()));
    verifyChart(baseUrl + "/charts/top-five", client, Arrays.asList(songCountPlayBean(songs.get(0), 6L), songCountPlayBean(songs.get(6), 6L), songCountPlayBean(songs.get(1), 5L), songCountPlayBean(songs.get(7), 5L), songCountPlayBean(songs.get(2), 4L)));
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) IntStream(java.util.stream.IntStream) CoreMatchers.is(org.hamcrest.CoreMatchers.is) Arrays(java.util.Arrays) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) WordCountInteractiveQueriesExampleTest.randomFreeLocalPort(io.confluent.examples.streams.interactivequeries.WordCountInteractiveQueriesExampleTest.randomFreeLocalPort) BeforeClass(org.junit.BeforeClass) HostInfo(org.apache.kafka.streams.state.HostInfo) Client(javax.ws.rs.client.Client) SpecificAvroSerializer(io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer) ClientBuilder(javax.ws.rs.client.ClientBuilder) MediaType(javax.ws.rs.core.MediaType) EmbeddedSingleNodeKafkaCluster(io.confluent.examples.streams.kafka.EmbeddedSingleNodeKafkaCluster) StreamsMetadata(org.apache.kafka.streams.state.StreamsMetadata) After(org.junit.After) Map(java.util.Map) Serdes(org.apache.kafka.common.serialization.Serdes) PlayEvent(io.confluent.examples.streams.avro.PlayEvent) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) ClassRule(org.junit.ClassRule) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) Before(org.junit.Before) IntFunction(java.util.function.IntFunction) Song(io.confluent.examples.streams.avro.Song) Properties(java.util.Properties) TestUtils(org.apache.kafka.test.TestUtils) Test(org.junit.Test) Invocation(javax.ws.rs.client.Invocation) AbstractKafkaAvroSerDeConfig(io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig) LongSerializer(org.apache.kafka.common.serialization.LongSerializer) Collectors(java.util.stream.Collectors) QueryableStoreTypes(org.apache.kafka.streams.state.QueryableStoreTypes) GenericType(javax.ws.rs.core.GenericType) List(java.util.List) KafkaStreams(org.apache.kafka.streams.KafkaStreams) ReadOnlyKeyValueStore(org.apache.kafka.streams.state.ReadOnlyKeyValueStore) Collections(java.util.Collections)
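
The IntFunction here bridges a range of song indices to expected chart entries: IntStream.range(...).mapToObj(intFunction) builds the expected list for each genre. Below is a stripped-down sketch of that pattern, with a plain String standing in for the project's SongPlayCountBean (ChartExpectationSketch is a made-up name).

import java.util.List;
import java.util.function.IntFunction;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class ChartExpectationSketch {
    public static void main(String[] args) {
        // Maps a song index to its expected entry; play counts decrease 6, 5, 4, ... within each genre.
        IntFunction<String> expectedAt = index -> "song#" + index + " plays=" + (6L - (index % 6));

        // Same shape as the test: turn a range of indices into the expected chart.
        List<String> punkChart = IntStream.range(0, 5)
                .mapToObj(expectedAt)
                .collect(Collectors.toList());

        punkChart.forEach(System.out::println);
    }
}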

Example 29 with IntFunction

Use of java.util.function.IntFunction in project RichTextFX by FXMisc.

The class LineIndicatorDemo, method start:

@Override
public void start(Stage primaryStage) {
    CodeArea codeArea = new CodeArea();
    IntFunction<Node> numberFactory = LineNumberFactory.get(codeArea);
    IntFunction<Node> arrowFactory = new ArrowFactory(codeArea.currentParagraphProperty());
    IntFunction<Node> graphicFactory = line -> {
        HBox hbox = new HBox(numberFactory.apply(line), arrowFactory.apply(line));
        hbox.setAlignment(Pos.CENTER_LEFT);
        return hbox;
    };
    codeArea.setParagraphGraphicFactory(graphicFactory);
    codeArea.replaceText("The green arrow will only be on the line where the caret appears.\n\nTry it.");
    codeArea.moveTo(0, 0);
    primaryStage.setScene(new Scene(new StackPane(codeArea), 600, 400));
    primaryStage.show();
}
Also used : Application(javafx.application.Application) Pos(javafx.geometry.Pos) Scene(javafx.scene.Scene) HBox(javafx.scene.layout.HBox) Stage(javafx.stage.Stage) Node(javafx.scene.Node) StackPane(javafx.scene.layout.StackPane) CodeArea(org.fxmisc.richtext.CodeArea) StyledTextArea(org.fxmisc.richtext.StyledTextArea) LineNumberFactory(org.fxmisc.richtext.LineNumberFactory) IntFunction(java.util.function.IntFunction)
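
RichTextFX models a paragraph graphic factory as an IntFunction<Node>: given a line index, produce the node to show in the gutter, and the HBox above combines two such factories per line. The sketch below mirrors that composition with Strings instead of JavaFX Nodes so it runs without a JavaFX runtime (GraphicFactorySketch and its output format are made up for illustration).

import java.util.function.IntFunction;

public class GraphicFactorySketch {
    public static void main(String[] args) {
        // Stand-ins for the RichTextFX factories: one produces a line-number label,
        // the other an arrow marker for the "current" line.
        IntFunction<String> numberFactory = line -> String.format("%3d", line + 1);
        IntFunction<String> arrowFactory = line -> (line == 0) ? "->" : "  ";

        // Combine the two per-line factories, mirroring the HBox composition above.
        IntFunction<String> graphicFactory =
                line -> numberFactory.apply(line) + " " + arrowFactory.apply(line);

        for (int line = 0; line < 3; line++) {
            System.out.println(graphicFactory.apply(line) + " | text of line " + (line + 1));
        }
    }
}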

Example 30 with IntFunction

Use of java.util.function.IntFunction in project hazelcast by hazelcast.

The class ExecutionPlan, method createLocalOutboundCollector:

private OutboundCollector createLocalOutboundCollector(EdgeDef edge, int processorIndex, int totalPartitionCount, int[][] partitionsPerProcessor) {
    int upstreamParallelism = edge.sourceVertex().localParallelism();
    int downstreamParallelism = edge.destVertex().localParallelism();
    int queueSize = edge.getConfig().getQueueSize();
    int numRemoteMembers = ptionArrgmt.getRemotePartitionAssignment().size();
    if (edge.routingPolicy() == RoutingPolicy.ISOLATED) {
        ConcurrentConveyor<Object>[] localConveyors = localConveyorMap.computeIfAbsent(edge.edgeId(), edgeId -> {
            int queueCount = upstreamParallelism / downstreamParallelism;
            int remainder = upstreamParallelism % downstreamParallelism;
            return Stream.concat(
                        Arrays.stream(createConveyorArray(remainder, queueCount + 1, queueSize)),
                        Arrays.stream(createConveyorArray(downstreamParallelism - remainder, Math.max(1, queueCount), queueSize)))
                    .toArray((IntFunction<ConcurrentConveyor<Object>[]>) ConcurrentConveyor[]::new);
        });
        OutboundCollector[] localCollectors = IntStream.range(0, downstreamParallelism)
                .filter(i -> i % upstreamParallelism == processorIndex % downstreamParallelism)
                .mapToObj(i -> new ConveyorCollector(localConveyors[i], processorIndex / downstreamParallelism, null))
                .toArray(OutboundCollector[]::new);
        return compositeCollector(localCollectors, edge, totalPartitionCount, true);
    } else {
        ConcurrentConveyor<Object>[] localConveyors = localConveyorMap.computeIfAbsent(edge.edgeId(), edgeId -> {
            int queueCount = upstreamParallelism + (!edge.isLocal() ? numRemoteMembers : 0);
            return createConveyorArray(downstreamParallelism, queueCount, queueSize);
        });
        OutboundCollector[] localCollectors = new OutboundCollector[downstreamParallelism];
        Arrays.setAll(localCollectors, n -> new ConveyorCollector(localConveyors[n], processorIndex, partitionsPerProcessor[n]));
        return compositeCollector(localCollectors, edge, totalPartitionCount, true);
    }
}
Also used : Address(com.hazelcast.cluster.Address) ImdgUtil.getMemberConnection(com.hazelcast.jet.impl.util.ImdgUtil.getMemberConnection) Arrays(java.util.Arrays) SnapshotContext(com.hazelcast.jet.impl.execution.SnapshotContext) Collections.unmodifiableList(java.util.Collections.unmodifiableList) ConcurrentConveyor.concurrentConveyor(com.hazelcast.internal.util.concurrent.ConcurrentConveyor.concurrentConveyor) Processor(com.hazelcast.jet.core.Processor) OutboundCollector.compositeCollector(com.hazelcast.jet.impl.execution.OutboundCollector.compositeCollector) ObjectWithPartitionId(com.hazelcast.jet.impl.util.ObjectWithPartitionId) ProcessorTasklet(com.hazelcast.jet.impl.execution.ProcessorTasklet) ImdgUtil(com.hazelcast.jet.impl.util.ImdgUtil) Collectors.toMap(java.util.stream.Collectors.toMap) ConcurrentConveyor(com.hazelcast.internal.util.concurrent.ConcurrentConveyor) Map(java.util.Map) Util.memoize(com.hazelcast.jet.impl.util.Util.memoize) SerializationServiceAware(com.hazelcast.internal.serialization.SerializationServiceAware) DISTRIBUTE_TO_ALL(com.hazelcast.jet.core.Edge.DISTRIBUTE_TO_ALL) ObjectDataInput(com.hazelcast.nio.ObjectDataInput) TASKLET_INIT_CLOSE_EXECUTOR_NAME(com.hazelcast.jet.impl.execution.TaskletExecutionService.TASKLET_INIT_CLOSE_EXECUTOR_NAME) InboundEdgeStream(com.hazelcast.jet.impl.execution.InboundEdgeStream) PrefixedLogger.prefix(com.hazelcast.jet.impl.util.PrefixedLogger.prefix) Collection(java.util.Collection) IPartitionService(com.hazelcast.internal.partition.IPartitionService) JobConfig(com.hazelcast.jet.config.JobConfig) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) ConcurrentInboundEdgeStream(com.hazelcast.jet.impl.execution.ConcurrentInboundEdgeStream) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) List(java.util.List) Stream(java.util.stream.Stream) DEFAULT_QUEUE_SIZE(com.hazelcast.jet.config.EdgeConfig.DEFAULT_QUEUE_SIZE) StoreSnapshotTasklet(com.hazelcast.jet.impl.execution.StoreSnapshotTasklet) ObjectDataOutput(com.hazelcast.nio.ObjectDataOutput) TopologyChangedException(com.hazelcast.jet.core.TopologyChangedException) IntStream(java.util.stream.IntStream) ComparatorEx(com.hazelcast.function.ComparatorEx) IdentifiedDataSerializable(com.hazelcast.nio.serialization.IdentifiedDataSerializable) ImdgUtil.writeList(com.hazelcast.jet.impl.util.ImdgUtil.writeList) OutboundEdgeStream(com.hazelcast.jet.impl.execution.OutboundEdgeStream) RoutingPolicy(com.hazelcast.jet.core.Edge.RoutingPolicy) Util.doWithClassLoader(com.hazelcast.jet.impl.util.Util.doWithClassLoader) SenderTasklet(com.hazelcast.jet.impl.execution.SenderTasklet) HashMap(java.util.HashMap) Supplier(java.util.function.Supplier) ProcSupplierCtx(com.hazelcast.jet.impl.execution.init.Contexts.ProcSupplierCtx) ArrayList(java.util.ArrayList) PrefixedLogger.prefixedLogger(com.hazelcast.jet.impl.util.PrefixedLogger.prefixedLogger) JetException(com.hazelcast.jet.JetException) ConveyorCollector(com.hazelcast.jet.impl.execution.ConveyorCollector) ReceiverTasklet(com.hazelcast.jet.impl.execution.ReceiverTasklet) ILogger(com.hazelcast.logging.ILogger) InternalSerializationService(com.hazelcast.internal.serialization.InternalSerializationService) Nonnull(javax.annotation.Nonnull) ProcessorSupplier(com.hazelcast.jet.core.ProcessorSupplier) QueuedPipe(com.hazelcast.internal.util.concurrent.QueuedPipe) IntFunction(java.util.function.IntFunction) JetConfig(com.hazelcast.jet.config.JetConfig) NodeEngineImpl(com.hazelcast.spi.impl.NodeEngineImpl) OneToOneConcurrentArrayQueue(com.hazelcast.internal.util.concurrent.OneToOneConcurrentArrayQueue) Connection(com.hazelcast.internal.nio.Connection) Tasklet(com.hazelcast.jet.impl.execution.Tasklet) ProcCtx(com.hazelcast.jet.impl.execution.init.Contexts.ProcCtx) AsyncSnapshotWriterImpl(com.hazelcast.jet.impl.util.AsyncSnapshotWriterImpl) IOException(java.io.IOException) ConveyorCollectorWithPartition(com.hazelcast.jet.impl.execution.ConveyorCollectorWithPartition) Subject(javax.security.auth.Subject) File(java.io.File) ImdgUtil.readList(com.hazelcast.jet.impl.util.ImdgUtil.readList) Collectors.toList(java.util.stream.Collectors.toList) OutboundCollector(com.hazelcast.jet.impl.execution.OutboundCollector) JobClassLoaderService(com.hazelcast.jet.impl.JobClassLoaderService) ProcessingGuarantee(com.hazelcast.jet.config.ProcessingGuarantee) JetServiceBackend(com.hazelcast.jet.impl.JetServiceBackend)
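
Besides the computeIfAbsent lambdas, the less obvious IntFunction above is the array generator passed to Stream.toArray: the cast (IntFunction<ConcurrentConveyor<Object>[]>) ConcurrentConveyor[]::new is needed because Java forbids creating arrays of a parameterized type directly. Below is a minimal sketch of both the plain and the generic-array cases (ArrayGeneratorSketch is a made-up name; List<String>[] stands in for the Hazelcast element type).

import java.util.List;
import java.util.function.IntFunction;
import java.util.stream.Stream;

public class ArrayGeneratorSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        // Stream.toArray(IntFunction<A[]>) uses the function to allocate an array of the
        // required length; a constructor reference like String[]::new is an IntFunction<String[]>.
        String[] words = Stream.of("a", "bb", "ccc").toArray(String[]::new);
        System.out.println(words.length); // 3

        // With a parameterized element type the generator must be written against the raw
        // array type and cast -- the same trick as the ConcurrentConveyor[]::new cast above.
        List<String>[] lists = Stream.of(List.of("x"), List.of("y"))
                .toArray((IntFunction<List<String>[]>) List[]::new);
        System.out.println(lists.length); // 2
    }
}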

Aggregations

IntFunction (java.util.function.IntFunction): 33
List (java.util.List): 10
Collectors (java.util.stream.Collectors): 10
Test (org.junit.Test): 9
Arrays (java.util.Arrays): 8
IOException (java.io.IOException): 7
ArrayList (java.util.ArrayList): 7
IntStream (java.util.stream.IntStream): 7
LoggerFactory (org.slf4j.LoggerFactory): 6
HashMap (java.util.HashMap): 5
Map (java.util.Map): 5
Objects (java.util.Objects): 5
Set (java.util.Set): 5
Logger (org.slf4j.Logger): 5
File (java.io.File): 4
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 4
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 4
Function (java.util.function.Function): 4
Supplier (java.util.function.Supplier): 4
BytesRef (org.apache.lucene.util.BytesRef): 4