Example use of com.hazelcast.function.FunctionEx in the hazelcast project:
class TestAbstractSqlConnector, method fullScanReader.
@Nonnull
@Override
public Vertex fullScanReader(@Nonnull DAG dag, @Nonnull Table table_, @Nullable Expression<Boolean> predicate, @Nonnull List<Expression<?>> projection, @Nullable FunctionEx<ExpressionEvalContext, EventTimePolicy<JetSqlRow>> eventTimePolicyProvider) {
    // Downcast to the test table implementation to reach its raw row data.
    TestTable testTable = (TestTable) table_;
    List<Object[]> tableRows = testTable.rows;
    boolean isStreaming = testTable.streaming;

    // Build, per processor, the generator that evaluates predicate/projection
    // over the static rows. The event-time policy is resolved lazily from the
    // processor context so the provider (if any) sees the live eval context.
    FunctionEx<Context, TestDataGenerator> generatorFn = processorCtx -> {
        ExpressionEvalContext eec = ExpressionEvalContext.from(processorCtx);
        EventTimePolicy<JetSqlRow> policy;
        if (eventTimePolicyProvider != null) {
            policy = eventTimePolicyProvider.apply(eec);
        } else {
            policy = EventTimePolicy.noEventTime();
        }
        return new TestDataGenerator(tableRows, predicate, projection, eec, policy, isStreaming);
    };

    return dag.newUniqueVertex(testTable.toString(), createProcessorSupplier(generatorFn));
}
Example use of com.hazelcast.function.FunctionEx in the hazelcast project:
class TestAllTypesSqlConnector, method fullScanReader.
@Nonnull
@Override
public Vertex fullScanReader(@Nonnull DAG dag, @Nonnull Table table, @Nullable Expression<Boolean> predicate, @Nonnull List<Expression<?>> projection, @Nonnull List<Expression<?>> projection2, @Nullable FunctionEx<ExpressionEvalContext, EventTimePolicy<JetSqlRow>> eventTimePolicyProvider) {
    // This connector is batch-only; it cannot honor an event-time policy.
    if (eventTimePolicyProvider != null) {
        // Fixed grammar of the user-facing message: "function are" -> "functions are".
        throw QueryException.error("Ordering functions are not supported for " + TYPE_NAME + " mappings");
    }
    // Emit at most one row: evaluate predicate/projection against the fixed
    // VALUES row, then close the buffer so the batch source completes.
    BatchSource<JetSqlRow> source = SourceBuilder.batch("batch", ExpressionEvalContext::from).<JetSqlRow>fillBufferFn((ctx, buf) -> {
        JetSqlRow row = ExpressionUtil.evaluate(predicate, projection, VALUES, ctx);
        if (row != null) {
            buf.add(row);
        }
        buf.close();
    }).build();
    // Reach into the pipeline-API transform to reuse its meta-supplier directly in the DAG.
    ProcessorMetaSupplier pms = ((BatchSourceTransform<JetSqlRow>) source).metaSupplier;
    return dag.newUniqueVertex(table.toString(), pms);
}
Example use of com.hazelcast.function.FunctionEx in the hazelcast project:
class CsvReadFileFnProvider, method createReadFileFn.
@SuppressWarnings("unchecked")
@Nonnull
@Override
public <T> FunctionEx<Path, Stream<T>> createReadFileFn(@Nonnull FileFormat<T> format) {
    CsvFileFormat<T> csvFileFormat = (CsvFileFormat<T>) format;
    // Format is not Serializable, so capture only the (serializable) target class
    // in the returned lambda.
    Class<?> formatClazz = csvFileFormat.clazz();
    return path -> {
        FileInputStream fis = new FileInputStream(path.toFile());
        // Fix: close the stream if anything fails before the java.util.stream.Stream
        // (whose onClose handler owns fis) is successfully constructed; previously
        // a missing header or a parse failure leaked the FileInputStream.
        try {
            MappingIterator<T> iterator;
            Function<T, T> projection = identity();
            if (formatClazz == String[].class) {
                // Raw String[] mode: read every row as an array, treating the first
                // row as the header used to build the field projection.
                ObjectReader reader = new CsvMapper().enable(Feature.WRAP_AS_ARRAY).readerFor(String[].class).with(CsvSchema.emptySchema().withSkipFirstDataRow(false));
                iterator = reader.readValues(fis);
                if (!iterator.hasNext()) {
                    throw new JetException("Header row missing in " + path);
                }
                String[] header = (String[]) iterator.next();
                List<String> fieldNames = csvFileFormat.fieldNames();
                if (fieldNames != null) {
                    // Reorder/select columns so output matches the requested field names.
                    projection = (Function<T, T>) createFieldProjection(header, fieldNames);
                }
            } else {
                // POJO mode: Jackson maps columns to bean properties via the header row.
                iterator = new CsvMapper().readerFor(formatClazz).withoutFeatures(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES).with(CsvSchema.emptySchema().withHeader()).readValues(fis);
            }
            // From here on, the returned Stream's onClose handler owns fis.
            return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, ORDERED), false).map(projection).onClose(() -> uncheckRun(fis::close));
        } catch (Throwable t) {
            try {
                fis.close();
            } catch (Exception closeEx) {
                t.addSuppressed(closeEx);
            }
            throw t;
        }
    };
}
Example use of com.hazelcast.function.FunctionEx in the hazelcast project:
class MetricsTest, method nonCooperativeProcessor.
@Test
public void nonCooperativeProcessor() {
    // Verify that a user-defined metric incremented inside a non-cooperative
    // processor is reported, and that the mapped results reach the sink list.
    DAG dag = new DAG();
    FunctionEx<Long, Long> mapFn = item -> {
        Metrics.metric("mapped").increment();
        return item * 10L;
    };
    Vertex sourceVertex = dag.newVertex("source", TestProcessors.ListSource.supplier(asList(1L, 2L, 3L)));
    Vertex mapVertex = dag.newVertex("map", new NonCoopTransformPSupplier(mapFn));
    Vertex sinkVertex = dag.newVertex("sink", writeListP("results"));
    dag.edge(between(sourceVertex, mapVertex)).edge(between(mapVertex, sinkVertex));

    Job job = runPipeline(dag);

    // One increment per input item.
    new JobMetricsChecker(job).assertSummedMetricValue("mapped", 3L);
    Set<Long> expected = new HashSet<>(Arrays.asList(10L, 20L, 30L));
    assertEquals(expected, new HashSet<>(instance.getList("results")));
}
Example use of com.hazelcast.function.FunctionEx in the hazelcast project:
class RebalanceStreamStageTest, method when_rebalanceAndMap_then_dagEdgeDistributed.
@Test
public void when_rebalanceAndMap_then_dagEdgeDistributed() {
    // Given: a stream stage over a known integer sequence and a formatting fn
    List<Integer> input = sequence(itemCount);
    StreamStage<Integer> sourceStage = streamStageFromList(input);
    FunctionEx<Integer, String> formatFn = i -> String.format("%04d-string", i);

    // When: rebalancing (without a key) before the map stage
    StreamStage<String> mappedStage = sourceStage.rebalance().map(formatFn);

    // Then: the inbound edge of the map vertex is distributed but unpartitioned
    mappedStage.writeTo(sink);
    DAG dag = p.toDag();
    Edge inboundEdge = dag.getInboundEdges("map").get(0);
    assertTrue("Rebalancing should make the edge distributed", inboundEdge.isDistributed());
    assertNull("Didn't rebalance by key, the edge must not be partitioned", inboundEdge.getPartitioner());

    // And the pipeline still produces the formatted items
    execute();
    String expected = streamToString(input.stream(), formatFn);
    String actual = streamToString(sinkStreamOf(String.class), identity());
    assertEquals(expected, actual);
}
Aggregations