Example 31 with Vertex

use of com.hazelcast.jet.core.Vertex in project hazelcast-jet by hazelcast.

the class HazelcastConnectorTest method when_readMap_withProjectionToNull_then_nullsSkipped.

@Test
public void when_readMap_withProjectionToNull_then_nullsSkipped() {
    IMapJet<Integer, Entry<Integer, String>> sourceMap = jetInstance.getMap(sourceName);
    range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, entry(i, i % 2 == 0 ? null : String.valueOf(i))));
    DAG dag = new DAG();
    Vertex source = dag.newVertex("source", readMapP(sourceName, new TruePredicate<>(), Projections.singleAttribute("value")));
    Vertex sink = dag.newVertex("sink", writeListP(sinkName));
    dag.edge(between(source, sink));
    jetInstance.newJob(dag).join();
    checkContents_projectedToNull(sinkName);
}
Also used : Vertex(com.hazelcast.jet.core.Vertex) Entry(java.util.Map.Entry) TruePredicate(com.hazelcast.query.TruePredicate) DAG(com.hazelcast.jet.core.DAG) Test(org.junit.Test)
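
The checkContents_projectedToNull helper is not shown in this snippet. A plausible sketch of what it verifies, assuming the sink is an IList and that only the odd keys (whose projected "value" attribute is non-null) reach it; the names and body are illustrative, not the actual test code:

private void checkContents_projectedToNull(String sinkName) {
    IListJet<Object> list = jetInstance.getList(sinkName);
    // even keys project to null and are skipped, so only ENTRY_COUNT / 2 items arrive
    assertEquals(ENTRY_COUNT / 2, list.size());
    list.forEach(item -> assertNotNull(item));
}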

Example 32 with Vertex

use of com.hazelcast.jet.core.Vertex in project hazelcast-jet by hazelcast.

the class HazelcastConnectorTest method when_streamMap_withProjectionToNull_then_nullsSkipped.

@Test
public void when_streamMap_withProjectionToNull_then_nullsSkipped() {
    DAG dag = new DAG();
    Vertex source = dag.newVertex("source", SourceProcessors.streamMapP(streamSourceName, mapPutEvents(), (EventJournalMapEvent<Integer, Entry<Integer, String>> entry) -> entry.getNewValue().getValue(), START_FROM_OLDEST, noWatermarks()));
    Vertex sink = dag.newVertex("sink", writeListP(streamSinkName));
    dag.edge(between(source, sink));
    Job job = jetInstance.newJob(dag);
    IMapJet<Integer, Entry<Integer, String>> sourceMap = jetInstance.getMap(streamSourceName);
    range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, entry(i, i % 2 == 0 ? null : String.valueOf(i))));
    assertTrueEventually(() -> checkContents_projectedToNull(streamSinkName), 10);
    job.cancel();
}
Also used : Vertex(com.hazelcast.jet.core.Vertex) Entry(java.util.Map.Entry) DAG(com.hazelcast.jet.core.DAG) Job(com.hazelcast.jet.Job) Test(org.junit.Test)

Example 33 with Vertex

use of com.hazelcast.jet.core.Vertex in project hazelcast-jet by hazelcast.

the class HazelcastConnectorTest method when_readCache_and_writeCache.

@Test
public void when_readCache_and_writeCache() {
    ICache<Integer, Integer> sourceCache = jetInstance.getCacheManager().getCache(sourceName);
    range(0, ENTRY_COUNT).forEach(i -> sourceCache.put(i, i));
    DAG dag = new DAG();
    Vertex source = dag.newVertex("source", readCacheP(sourceName));
    Vertex sink = dag.newVertex("sink", writeCacheP(sinkName));
    dag.edge(between(source, sink));
    jetInstance.newJob(dag).join();
    assertEquals(ENTRY_COUNT, jetInstance.getCacheManager().getCache(sinkName).size());
}
Also used : Vertex(com.hazelcast.jet.core.Vertex) DAG(com.hazelcast.jet.core.DAG) Test(org.junit.Test)

Example 34 with Vertex

use of com.hazelcast.jet.core.Vertex in project hazelcast-jet by hazelcast.

the class WordCountTest method testJet.

@Test
public void testJet() {
    DAG dag = new DAG();
    Vertex source = dag.newVertex("source", SourceProcessors.readMapP("words"));
    Vertex tokenize = dag.newVertex("tokenize", flatMapP((Map.Entry<?, String> line) -> {
        StringTokenizer s = new StringTokenizer(line.getValue());
        return () -> s.hasMoreTokens() ? s.nextToken() : null;
    }));
    // word -> (word, count)
    Vertex aggregateStage1 = dag.newVertex("aggregateStage1", aggregateByKeyP(singletonList(wholeItem()), counting(), Util::entry));
    // (word, count) -> (word, count)
    DistributedFunction<Entry, ?> getEntryKeyFn = Entry::getKey;
    Vertex aggregateStage2 = dag.newVertex("aggregateStage2", aggregateByKeyP(singletonList(getEntryKeyFn), summingLong(Entry<String, Long>::getValue), Util::entry));
    Vertex sink = dag.newVertex("sink", SinkProcessors.writeMapP("counts"));
    dag.edge(between(source.localParallelism(1), tokenize))
       .edge(between(tokenize, aggregateStage1)
               .partitioned(wholeItem(), HASH_CODE))
       .edge(between(aggregateStage1, aggregateStage2)
               .distributed()
               .partitioned(entryKey()))
       .edge(between(aggregateStage2, sink.localParallelism(1)));
    benchmark("jet", () -> instance.newJob(dag).join());
    assertCounts(instance.getMap("counts"));
}
Also used : Vertex(com.hazelcast.jet.core.Vertex) StringTokenizer(java.util.StringTokenizer) Entry(java.util.Map.Entry) DAG(com.hazelcast.jet.core.DAG) Map(java.util.Map) HashMap(java.util.HashMap) NightlyTest(com.hazelcast.test.annotation.NightlyTest) Test(org.junit.Test)
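
The benchmark reads its input from a pre-populated "words" map. A minimal, hypothetical setup sketch (the key type and the sample lines are illustrative assumptions, not taken from the test class):

// each entry holds one line of text; the tokenize vertex splits it into words
IMapJet<Integer, String> words = instance.getMap("words");
words.put(0, "the quick brown fox jumps over the lazy dog");
words.put(1, "pack my box with five dozen liquor jugs");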

Example 35 with Vertex

use of com.hazelcast.jet.core.Vertex in project hazelcast-jet-reference-manual by hazelcast.

the class StockExchangeCoreApi method buildDag.

static DAG buildDag() {
    // tag::s1[]
    DistributedToLongFunction<? super Trade> timestampFn = Trade::timestamp;
    DistributedFunction<? super Trade, ?> keyFn = Trade::productId;
    SlidingWindowPolicy winPolicy = slidingWinPolicy(SLIDING_WINDOW_LENGTH_MILLIS, SLIDE_STEP_MILLIS);
    DAG dag = new DAG();
    Vertex tradeSource = dag.newVertex("trade-source", SourceProcessors.<Trade, Long, Trade>streamMapP(TRADES_MAP_NAME, // <1>
    alwaysTrue(), // <1>
    EventJournalMapEvent::getNewValue, // <2>
    JournalInitialPosition.START_FROM_OLDEST, wmGenParams(// <3>
    timestampFn, // <4>
    limitingLag(SECONDS.toMillis(3)), // <5>
    emitByFrame(winPolicy), // <6>
    SECONDS.toMillis(3))));
    Vertex slidingStage1 = dag.newVertex("sliding-stage-1", Processors.accumulateByFrameP(singletonList(keyFn), singletonList(timestampFn), TimestampKind.EVENT, winPolicy, counting()));
    Vertex slidingStage2 = dag.newVertex("sliding-stage-2", Processors.combineToSlidingWindowP(winPolicy, counting(), TimestampedEntry::new));
    Vertex formatOutput = dag.newVertex("format-output", mapUsingContextP(ContextFactory.withCreateFn(x -> DateTimeFormatter.ofPattern("HH:mm:ss.SSS")), (DateTimeFormatter timeFormat, TimestampedEntry<String, Long> tse) -> String.format("%s %5s %4d", timeFormat.format(Instant.ofEpochMilli(tse.getTimestamp()).atZone(ZoneId.systemDefault())), tse.getKey(), tse.getValue())));
    Vertex sink = dag.newVertex("sink", SinkProcessors.writeFileP(OUTPUT_DIR_NAME));
    tradeSource.localParallelism(1);
    return dag
            .edge(between(tradeSource, slidingStage1)
                    .partitioned(keyFn, HASH_CODE))
            .edge(between(slidingStage1, slidingStage2)
                    .partitioned(entryKey(), HASH_CODE)
                    .distributed())
            .edge(between(slidingStage2, formatOutput)
                    .isolated())
            .edge(between(formatOutput, sink));
// end::s1[]
}
Also used : Vertex(com.hazelcast.jet.core.Vertex) SlidingWindowPolicy(com.hazelcast.jet.core.SlidingWindowPolicy) DAG(com.hazelcast.jet.core.DAG) DateTimeFormatter(java.time.format.DateTimeFormatter)
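
The DAG above refers to the Trade type only through Trade::timestamp and Trade::productId. A minimal sketch of such a class, assuming an epoch-millisecond timestamp and a String product id (the actual sample class in the reference manual may carry more fields):

import java.io.Serializable;

public class Trade implements Serializable {
    private final long timestamp;   // event time in epoch millis, consumed by the watermark generator
    private final String productId; // grouping key extracted by keyFn

    public Trade(long timestamp, String productId) {
        this.timestamp = timestamp;
        this.productId = productId;
    }

    public long timestamp() { return timestamp; }
    public String productId() { return productId; }
}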

Aggregations

Vertex (com.hazelcast.jet.core.Vertex) 189
DAG (com.hazelcast.jet.core.DAG) 130
Test (org.junit.Test) 95
QuickTest (com.hazelcast.test.annotation.QuickTest) 57
Job (com.hazelcast.jet.Job) 53
ParallelJVMTest (com.hazelcast.test.annotation.ParallelJVMTest) 48
Entry (java.util.Map.Entry) 41
List (java.util.List) 28
Edge.between (com.hazelcast.jet.core.Edge.between) 26
Map (java.util.Map) 26
Assert.assertEquals (org.junit.Assert.assertEquals) 23
ProcessorMetaSupplier (com.hazelcast.jet.core.ProcessorMetaSupplier) 21
IntStream (java.util.stream.IntStream) 21
Assert.assertTrue (org.junit.Assert.assertTrue) 19
ProcessorSupplier (com.hazelcast.jet.core.ProcessorSupplier) 18
Category (org.junit.experimental.categories.Category) 18
Collectors.toList (java.util.stream.Collectors.toList) 17
Nonnull (javax.annotation.Nonnull) 17
FunctionEx (com.hazelcast.function.FunctionEx) 15
Edge (com.hazelcast.jet.core.Edge) 15