Use of com.hazelcast.jet.datamodel.ItemsByTag in project hazelcast by hazelcast.
Class CoAggregateOperationBuilder, method build.
/**
* Builds and returns the multi-input {@link AggregateOperation}. It will
* call the supplied {@code exportFinishFn} to transform the {@link ItemsByTag}
* it creates to the result type it emits as the actual result.
*
* @param exportFinishFn function to convert {@link ItemsByTag} to the
* target result type. It must be stateless and {@linkplain
* Processor#isCooperative() cooperative}.
*/
@Nonnull
@SuppressWarnings({ "unchecked", "ConstantConditions" })
public <R> AggregateOperation<Object[], R> build(
        @Nonnull FunctionEx<? super ItemsByTag, ? extends R> exportFinishFn
) {
    checkSerializable(exportFinishFn, "exportFinishFn");
    Tag[] tags = opsByTag.keySet().stream().sorted().toArray(Tag[]::new);
    for (int i = 0; i < tags.length; i++) {
        Preconditions.checkTrue(tags[i].index() == i, "Registered tags' indices are "
                + stream(tags).map(Tag::index).collect(toList())
                + ", but should be " + range(0, tags.length).boxed().collect(toList()));
    }
    // Variable `sorted` extracted due to type inference failure
    Stream<Entry<Tag, AggregateOperation1>> sorted =
            opsByTag.entrySet().stream().sorted(comparing(Entry::getKey));
    List<AggregateOperation1> ops = sorted.map(Entry::getValue).collect(toList());
    BiConsumerEx[] combineFns = ops.stream().map(AggregateOperation::combineFn).toArray(BiConsumerEx[]::new);
    BiConsumerEx[] deductFns = ops.stream().map(AggregateOperation::deductFn).toArray(BiConsumerEx[]::new);
    FunctionEx[] exportFns = ops.stream().map(AggregateOperation::exportFn).toArray(FunctionEx[]::new);
    FunctionEx[] finishFns = ops.stream().map(AggregateOperation::finishFn).toArray(FunctionEx[]::new);
    AggregateOperationBuilder.VarArity<Object[], Void> b = AggregateOperation
            .withCreate(() -> ops.stream().map(op -> op.createFn().get()).toArray())
            .varArity();
    opsByTag.forEach((tag, op) -> {
        int index = tag.index();
        b.andAccumulate(tag, (acc, item) -> op.accumulateFn().accept(acc[index], item));
    });
    return b
            .andCombine(stream(combineFns).anyMatch(Objects::isNull) ? null : (acc1, acc2) -> {
                for (int i = 0; i < combineFns.length; i++) {
                    combineFns[i].accept(acc1[i], acc2[i]);
                }
            })
            .andDeduct(stream(deductFns).anyMatch(Objects::isNull) ? null : (acc1, acc2) -> {
                for (int i = 0; i < deductFns.length; i++) {
                    deductFns[i].accept(acc1[i], acc2[i]);
                }
            })
            .<R>andExport(acc -> {
                ItemsByTag result = new ItemsByTag();
                for (int i = 0; i < exportFns.length; i++) {
                    result.put(tags[i], exportFns[i].apply(acc[i]));
                }
                return exportFinishFn.apply(result);
            })
            .andFinish(acc -> {
                ItemsByTag result = new ItemsByTag();
                for (int i = 0; i < finishFns.length; i++) {
                    result.put(tags[i], finishFns[i].apply(acc[i]));
                }
                return exportFinishFn.apply(result);
            });
}
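A minimal usage sketch of this build(exportFinishFn) overload, assuming two input tags (tag0_in, tag1_in) obtained from a stage-level aggregate builder as in the tests further down, and summingLong from AggregateOperations; the variable names are illustrative only:

// Hypothetical snippet: collapse the per-tag sums into a single Long at build time.
CoAggregateOperationBuilder b = AggregateOperations.coAggregateOperationBuilder();
Tag<Long> tagSum0 = b.add(tag0_in, AggregateOperations.summingLong(Integer::longValue));
Tag<Long> tagSum1 = b.add(tag1_in, AggregateOperations.summingLong(Integer::longValue));
// exportFinishFn receives the ItemsByTag assembled above and emits the combined result:
AggregateOperation<Object[], Long> op = b.build(ibt -> ibt.get(tagSum0) + ibt.get(tagSum1));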
Use of com.hazelcast.jet.datamodel.ItemsByTag in project hazelcast by hazelcast.
Class AllOfAggregationBuilder, method build.
/**
* Builds and returns the composite {@link AggregateOperation1}. It will
* call the supplied {@code exportFinishFn} to transform the {@link ItemsByTag}
* it creates to the result type it emits as the actual result.
*
* @param exportFinishFn function that converts the {@link ItemsByTag} to
* the target result type. It must be stateless and {@linkplain
* Processor#isCooperative() cooperative}.
*/
@Nonnull
@SuppressWarnings({ "unchecked", "ConstantConditions" })
public <R> AggregateOperation1<T, Object[], R> build(@Nonnull FunctionEx<ItemsByTag, R> exportFinishFn) {
    checkSerializable(exportFinishFn, "exportFinishFn");
    // Avoid capturing this builder in the lambdas:
    List<Tag> tags = this.tags;
    List<AggregateOperation1> operations = this.operations;
    return (AggregateOperation1<T, Object[], R>) AggregateOperation
            .withCreate(() -> {
                Object[] acc = new Object[tags.size()];
                Arrays.setAll(acc, i -> operations.get(i).createFn().get());
                return acc;
            })
            .andAccumulate((acc, item) -> {
                for (int i = 0; i < acc.length; i++) {
                    operations.get(i).accumulateFn().accept(acc[i], item);
                }
            })
            .andCombine(operations.stream().anyMatch(o -> o.combineFn() == null) ? null : (acc1, acc2) -> {
                for (int i = 0; i < acc1.length; i++) {
                    operations.get(i).combineFn().accept(acc1[i], acc2[i]);
                }
            })
            .andDeduct(operations.stream().anyMatch(o -> o.deductFn() == null) ? null : (acc1, acc2) -> {
                for (int i = 0; i < acc1.length; i++) {
                    operations.get(i).deductFn().accept(acc1[i], acc2[i]);
                }
            })
            .andExport(acc -> {
                ItemsByTag result = new ItemsByTag();
                for (int i = 0; i < tags.size(); i++) {
                    Object exportedVal = operations.get(i).exportFn().apply(acc[i]);
                    result.put(tags.get(i), exportedVal);
                }
                return exportFinishFn.apply(result);
            })
            .andFinish(acc -> {
                ItemsByTag result = new ItemsByTag();
                for (int i = 0; i < tags.size(); i++) {
                    Object finishedVal = operations.get(i).finishFn().apply(acc[i]);
                    result.put(tags.get(i), finishedVal);
                }
                return exportFinishFn.apply(result);
            });
}
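A minimal usage sketch of this single-input counterpart, assuming allOfBuilder, summingLong and counting from AggregateOperations and the Tuple2 data type from com.hazelcast.jet.datamodel; here exportFinishFn picks the two values out of the ItemsByTag:

// Hypothetical snippet: compute sum and count of Long items in a single pass.
AllOfAggregationBuilder<Long> builder = AggregateOperations.allOfBuilder();
Tag<Long> tagSum = builder.add(AggregateOperations.summingLong(Long::longValue));
Tag<Long> tagCount = builder.add(AggregateOperations.counting());
// exportFinishFn converts the ItemsByTag into the final Tuple2 result:
AggregateOperation1<Long, Object[], Tuple2<Long, Long>> op =
        builder.build(ibt -> Tuple2.tuple2(ibt.get(tagSum), ibt.get(tagCount)));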
Use of com.hazelcast.jet.datamodel.ItemsByTag in project hazelcast by hazelcast.
Class WindowAggregateTest, method aggregateBuilder_withComplexAggrOp.
@Test
public void aggregateBuilder_withComplexAggrOp() {
    // Given
    CoAggregateFixture fx = new CoAggregateFixture();

    // When
    WindowAggregateBuilder1<Integer> b = fx.stage0.aggregateBuilder();
    Tag<Integer> tag0_in = b.tag0();
    Tag<Integer> tag1_in = b.add(fx.newStage());
    CoAggregateOperationBuilder b2 = coAggregateOperationBuilder();
    Tag<Long> tag0 = b2.add(tag0_in, SUMMING);
    Tag<Long> tag1 = b2.add(tag1_in, SUMMING);
    StreamStage<WindowResult<ItemsByTag>> aggregated = b.build(b2.build());

    // Then
    aggregated.writeTo(sink);
    execute();
    assertEquals(
            fx.expectedString2,
            streamToString(
                    this.<ItemsByTag>sinkStreamOfWinResult(),
                    wr -> FORMAT_FN_2.apply(wr.end(), tuple2(wr.result().get(tag0), wr.result().get(tag1)))));
}
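The test keeps the raw ItemsByTag in the window result; a sketch of the alternative (assuming the same fixture, builders and tags) that applies the build(exportFinishFn) overload documented above, so the stage emits a Tuple2 per window instead:

// Hypothetical variant of the stage above: fold the ItemsByTag into a Tuple2 at build time.
StreamStage<WindowResult<Tuple2<Long, Long>>> aggregatedTuples =
        b.build(b2.build(ibt -> tuple2(ibt.get(tag0), ibt.get(tag1))));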
Use of com.hazelcast.jet.datamodel.ItemsByTag in project hazelcast by hazelcast.
Class BatchAggregateTest, method groupAggregateBuilder_withComplexAggrOp_withOutputFn.
@Test
@SuppressWarnings("ConstantConditions")
public void groupAggregateBuilder_withComplexAggrOp_withOutputFn() {
    // Given
    GroupAggregateFixture fx = new GroupAggregateFixture();
    BatchStageWithKey<Integer, Integer> stage0 = fx.srcStage0.groupingKey(fx.keyFn);
    BatchStageWithKey<Integer, Integer> stage1 = fx.srcStage1().groupingKey(fx.keyFn);
    BatchStageWithKey<Integer, Integer> stage2 = fx.srcStage2().groupingKey(fx.keyFn);

    // When
    GroupAggregateBuilder1<Integer, Integer> b = stage0.aggregateBuilder();
    Tag<Integer> tag0_in = b.tag0();
    Tag<Integer> tag1_in = b.add(stage1);
    Tag<Integer> tag2_in = b.add(stage2);
    CoAggregateOperationBuilder agb = coAggregateOperationBuilder();
    Tag<Long> tag0 = agb.add(tag0_in, SUMMING);
    Tag<Long> tag1 = agb.add(tag1_in, SUMMING);
    Tag<Long> tag2 = agb.add(tag2_in, SUMMING);
    AggregateOperation<Object[], ItemsByTag> aggrOp = agb.build();
    BatchStage<Entry<Integer, Long>> aggregated = b.build(aggrOp).map(e -> {
        ItemsByTag ibt = e.getValue();
        return entry(e.getKey(), ibt.get(tag0) + ibt.get(tag1) + ibt.get(tag2));
    });

    // Then
    aggregated.writeTo(sink);
    execute();
    Map<Integer, Long> expectedMap0 = input.stream().collect(groupingBy(fx.keyFn, fx.collectOp));
    Map<Integer, Long> expectedMap1 = input.stream().map(fx.mapFn1).collect(groupingBy(fx.keyFn, fx.collectOp));
    Map<Integer, Long> expectedMap2 = input.stream().map(fx.mapFn2).collect(groupingBy(fx.keyFn, fx.collectOp));
    assertEquals(
            streamToString(
                    expectedMap0.entrySet().stream(),
                    e -> FORMAT_FN.apply(entry(
                            e.getKey(),
                            e.getValue() + expectedMap1.get(e.getKey()) + expectedMap2.get(e.getKey())))),
            streamToString(sinkStreamOfEntry(), FORMAT_FN));
}
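The separate map stage above could instead be folded into the aggregate operation itself with the build(exportFinishFn) overload shown at the top of this page; a sketch assuming the same builder and tags:

// Hypothetical variant: exportFinishFn does the summing, so no map stage is needed.
AggregateOperation<Object[], Long> summedOp =
        agb.build(ibt -> ibt.get(tag0) + ibt.get(tag1) + ibt.get(tag2));
BatchStage<Entry<Integer, Long>> aggregatedDirect = b.build(summedOp);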
Use of com.hazelcast.jet.datamodel.ItemsByTag in project hazelcast by hazelcast.
Class BatchAggregateTest, method aggregateBuilder_withComplexAggrOp.
@Test
public void aggregateBuilder_withComplexAggrOp() {
    // Given
    AggregateBuilderFixture fx = new AggregateBuilderFixture();

    // When
    AggregateBuilder1<Integer> b = batchStageFromInput().aggregateBuilder();
    Tag<Integer> tag0_in = b.tag0();
    Tag<Integer> tag1_in = b.add(fx.stage1);
    Tag<Integer> tag2_in = b.add(fx.stage2);
    CoAggregateOperationBuilder agb = coAggregateOperationBuilder();
    Tag<Long> tag0 = agb.add(tag0_in, SUMMING);
    Tag<Long> tag1 = agb.add(tag1_in, SUMMING);
    Tag<Long> tag2 = agb.add(tag2_in, SUMMING);
    AggregateOperation<Object[], ItemsByTag> aggrOp = agb.build();
    BatchStage<ItemsByTag> aggregated = b.build(aggrOp);

    // Then
    aggregated.writeTo(sink);
    execute();
    long sum0 = input.stream().mapToLong(i -> i).sum();
    assertEquals(
            singletonList(itemsByTag(tag0, sum0, tag1, FACTOR_1 * sum0, tag2, FACTOR_2 * sum0)),
            new ArrayList<>(sinkList));
}
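Here too the ItemsByTag could be collapsed when the operation is built; a sketch assuming the same builder and tags, using the Tuple3 data type from com.hazelcast.jet.datamodel:

// Hypothetical variant: emit a Tuple3 instead of ItemsByTag.
AggregateOperation<Object[], Tuple3<Long, Long, Long>> tupleOp =
        agb.build(ibt -> Tuple3.tuple3(ibt.get(tag0), ibt.get(tag1), ibt.get(tag2)));
BatchStage<Tuple3<Long, Long, Long>> aggregatedTuples = b.build(tupleOp);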