Usage example of com.hazelcast.jet.core.Vertex from the hazelcast-jet-reference-manual project by Hazelcast: the main method of class S8.
// end::s1[]
static void main() {
    // tag::s2[]
    JetInstance instance = Jet.newJetInstance();
    int limit = 10;
    DAG dag = new DAG();
    // Source vertex: emits the integers 0..limit-1.
    Vertex numbers = dag.newVertex("generate-numbers",
            () -> new GenerateNumbersP(limit));
    // Sink vertex: logs each received item via the diagnostic logger.
    Vertex logger = dag.newVertex("log-input",
            DiagnosticProcessors.writeLoggerP(i -> "Received number: " + i));
    dag.edge(Edge.between(numbers, logger));
    try {
        // Submit the DAG and block until the job completes.
        instance.newJob(dag).join();
    } finally {
        // Always tear down all local Jet instances, even on failure.
        Jet.shutdownAll();
    }
    // end::s2[]
}
Usage example of com.hazelcast.jet.core.Vertex from the hazelcast-jet-reference-manual project by Hazelcast: the s2 method of class ExpertZoneDag.
static void s2() {
    // tag::s2[]
    // <1>
    // Source: read every file in the current directory line by line.
    Vertex src = dag.newVertex("source",
            SourceProcessors.readFilesP(".", UTF_8, "*", (path, text) -> text));
    // Transform: map each line to an entry of (line, line length).
    Vertex mapStage = dag.newVertex("transform",
            mapP((String text) -> entry(text, text.length())));
    // Sink: write the entries into the IMap named "sinkMap".
    Vertex mapSink = dag.newVertex("sink",
            SinkProcessors.writeMapP("sinkMap"));
    // <2>
    src.localParallelism(1);
    // <3>
    dag.edge(between(src, mapStage));
    dag.edge(between(mapStage, mapSink));
    // end::s2[]
}
Usage example of com.hazelcast.jet.core.Vertex from the hazelcast-jet-reference-manual project by Hazelcast: the s3 method of class ExpertZoneDag.
static void s3() {
    // tag::s3[]
    // Source: read every file in the current directory line by line.
    Vertex src = dag.newVertex("source",
            SourceProcessors.readFilesP(".", UTF_8, "*", (path, text) -> text));
    Vertex upper = dag.newVertex("toUpper", mapP((String s) -> s.toUpperCase()));
    Vertex lower = dag.newVertex("toLower", mapP((String s) -> s.toLowerCase()));
    // Fan out from the source: ordinal 0 feeds toUpper, ordinal 1 feeds toLower.
    dag.edge(between(src, upper));
    dag.edge(from(src, 1).to(lower));
    // end::s3[]
}
Usage example of com.hazelcast.jet.core.Vertex from the hazelcast project by Hazelcast: the indexScanReader method of class IMapSqlConnector.
/**
 * Builds the DAG fragment that scans an IMap through one of its indexes.
 * <p>
 * Creates an index-scan vertex from the given table, index, filters and
 * projection. If the index is SORTED, it additionally appends a
 * total-parallelism-one "SortCombine" vertex on {@code localMemberAddress}
 * that merges the per-member, per-partition ordered streams via the given
 * comparator, and returns that vertex; otherwise it returns the scan vertex
 * itself.
 *
 * @param dag                the DAG to add vertices/edges to
 * @param localMemberAddress member that hosts the single sort-combine stage
 * @param table0             the table to scan (must be a PartitionedMapTable)
 * @param tableIndex         the map index to scan
 * @param remainingFilter    filter to apply after the index lookup, or null
 * @param projection         expressions projecting each row
 * @param indexFilter        condition pushed down into the index, or null
 * @param comparator         row comparator; must be non-null for SORTED indexes
 * @param descending         whether to traverse the index in descending order
 * @return the vertex producing the final (possibly sorted) row stream
 */
@Nonnull
@SuppressWarnings("checkstyle:ParameterNumber")
public Vertex indexScanReader(@Nonnull DAG dag, @Nonnull Address localMemberAddress, @Nonnull Table table0, @Nonnull MapTableIndex tableIndex, @Nullable Expression<Boolean> remainingFilter, @Nonnull List<Expression<?>> projection, @Nullable IndexFilter indexFilter, @Nullable ComparatorEx<JetSqlRow> comparator, boolean descending) {
PartitionedMapTable table = (PartitionedMapTable) table0;
// Bundle everything the scan processors need into one serializable descriptor.
MapIndexScanMetadata indexScanMetadata = new MapIndexScanMetadata(table.getMapName(), tableIndex.getName(), table.getKeyDescriptor(), table.getValueDescriptor(), Arrays.asList(table.paths()), Arrays.asList(table.types()), indexFilter, projection, remainingFilter, comparator, descending);
Vertex scanner = dag.newUniqueVertex("Index(" + toString(table) + ")", readMapIndexSupplier(indexScanMetadata));
// LP must be 1 - one local index contains all local partitions, if there are 2 local processors,
// the index will be scanned twice and each time half of the partitions will be thrown out.
scanner.localParallelism(1);
if (tableIndex.getType() == IndexType.SORTED) {
// Sorted scan: merge all members' ordered streams on a single processor
// at localMemberAddress so the global order is preserved.
Vertex sorter = dag.newUniqueVertex("SortCombine", ProcessorMetaSupplier.forceTotalParallelismOne(ProcessorSupplier.of(mapP(FunctionEx.identity())), localMemberAddress));
assert comparator != null;
// The ordered() edge does the merge-sort of the incoming streams.
dag.edge(between(scanner, sorter).ordered(comparator).distributeTo(localMemberAddress).allToOne(""));
return sorter;
}
return scanner;
}
Usage example of com.hazelcast.jet.core.Vertex from the hazelcast project by Hazelcast: the writeProcessor method of class KafkaSqlConnector.
/**
 * Builds the two-vertex DAG fragment that writes rows into a Kafka topic:
 * a projection vertex that converts rows into key/value entries, followed
 * by the Kafka sink vertex.
 *
 * @param dag    the DAG to add the vertices/edge to
 * @param table0 the target table (must be a KafkaTable)
 * @return the first (projection) vertex, i.e. the fragment's entry point
 */
@Nonnull
private Vertex writeProcessor(DAG dag, Table table0) {
KafkaTable table = (KafkaTable) table0;
// Project each row into a Map.Entry using the table's upsert descriptors.
Vertex vStart = dag.newUniqueVertex("Project(" + table + ")", KvProcessors.entryProjector(table.paths(), table.types(), table.keyUpsertDescriptor(), table.valueUpsertDescriptor(), false));
// set the parallelism to match that of the kafka sink - see https://github.com/hazelcast/hazelcast/issues/20507
// TODO eliminate the project vertex altogether and do the projecting in the sink directly
vStart.localParallelism(1);
// Sink: publish each entry's key/value to the table's Kafka topic.
Vertex vEnd = dag.newUniqueVertex(table.toString(), KafkaProcessors.<Entry<Object, Object>, Object, Object>writeKafkaP(table.kafkaProducerProperties(), table.topicName(), Entry::getKey, Entry::getValue, true));
dag.edge(between(vStart, vEnd));
return vStart;
}
Aggregations