Use of com.hazelcast.jet.pipeline.ServiceFactory in project hazelcast by hazelcast.
The class ComputeStageImplBase, method attachMapUsingPartitionedServiceAsyncBatched.
@Nonnull
@SuppressWarnings({"unchecked", "rawtypes"})
<S, K, R, RET> RET attachMapUsingPartitionedServiceAsyncBatched(
        @Nonnull ServiceFactory<?, S> serviceFactory,
        int maxBatchSize,
        @Nonnull FunctionEx<? super T, ? extends K> partitionKeyFn,
        @Nonnull BiFunctionEx<? super S, ? super List<T>, ? extends CompletableFuture<List<R>>> mapAsyncFn
) {
    checkSerializable(mapAsyncFn, "mapAsyncFn");
    checkSerializable(partitionKeyFn, "partitionKeyFn");
    serviceFactory = moveAttachedFilesToPipeline(serviceFactory);
    BiFunctionEx<? super S, ? super List<T>, ? extends CompletableFuture<List<Traverser<R>>>> flatMapAsyncFn =
            (s, items) -> mapAsyncFn.apply(s, items).thenApply(list -> toList(list, Traversers::singleton));
    BiFunctionEx adaptedFlatMapFn = fnAdapter.adaptFlatMapUsingServiceAsyncBatchedFn(flatMapAsyncFn);
    FunctionEx adaptedPartitionKeyFn = fnAdapter.adaptKeyFn(partitionKeyFn);
    // Here we flatten the result from List<Traverser<R>> to Traverser<R>.
    // The former is used in pipeline API, the latter in core API.
    BiFunctionEx<? super S, ? super List<T>, ? extends CompletableFuture<Traverser<R>>> flattenedFn =
            (svc, items) -> {
                // R might actually be JetEvent<R> -- we can't represent this with static types
                CompletableFuture<List<Traverser<R>>> f =
                        (CompletableFuture<List<Traverser<R>>>) adaptedFlatMapFn.apply(svc, items);
                return f.thenApply(res -> traverseIterable(res).flatMap(Function.identity()));
            };
    PartitionedProcessorTransform processorTransform = flatMapUsingServiceAsyncBatchedPartitionedTransform(
            transform, "map", serviceFactory, MAX_CONCURRENT_ASYNC_BATCHES, maxBatchSize,
            flattenedFn, adaptedPartitionKeyFn);
    return attach(processorTransform, fnAdapter);
}
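The partitioned attach path above is what the keyed side of the pipeline API ultimately goes through. The sketch below shows one plausible way to reach it, assuming the mapUsingServiceAsyncBatched overload available on a stage with a groupingKey; the prefix service, key function, and batch size are illustrative, not taken from the source.

import com.hazelcast.jet.pipeline.Pipeline;
import com.hazelcast.jet.pipeline.ServiceFactories;
import com.hazelcast.jet.pipeline.ServiceFactory;
import com.hazelcast.jet.pipeline.Sinks;
import com.hazelcast.jet.pipeline.test.TestSources;

import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

public class PartitionedAsyncBatchSketch {
    public static void main(String[] args) {
        // Illustrative shared service: a constant prefix string, nothing to destroy
        ServiceFactory<?, String> prefixService =
                ServiceFactories.sharedService(ctx -> "item-", prefix -> { });

        Pipeline p = Pipeline.create();
        p.readFrom(TestSources.items(1, 2, 3, 4))
         // groupingKey(...) makes the stage partitioned, so the batched async map
         // below is attached via the partitioned code path shown above
         .groupingKey(i -> i % 2)
         .mapUsingServiceAsyncBatched(prefixService, 128,
                 (prefix, items) -> CompletableFuture.supplyAsync(() ->
                         items.stream().map(i -> prefix + i).collect(Collectors.toList())))
         .writeTo(Sinks.logger());
        // Submit with hz.getJet().newJob(p) on a running HazelcastInstance.
    }
}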
Use of com.hazelcast.jet.pipeline.ServiceFactory in project hazelcast by hazelcast.
The class ComputeStageImplBase, method attachMapUsingServiceAsyncBatched.
@Nonnull
@SuppressWarnings({"unchecked", "rawtypes"})
<S, R, RET> RET attachMapUsingServiceAsyncBatched(
        @Nonnull ServiceFactory<?, S> serviceFactory,
        int maxBatchSize,
        @Nonnull BiFunctionEx<? super S, ? super List<T>, ? extends CompletableFuture<List<Traverser<R>>>> flatMapAsyncBatchedFn
) {
    checkSerializable(flatMapAsyncBatchedFn, "mapAsyncBatchedFn");
    serviceFactory = moveAttachedFilesToPipeline(serviceFactory);
    BiFunctionEx adaptedFn = fnAdapter.adaptFlatMapUsingServiceAsyncBatchedFn(flatMapAsyncBatchedFn);
    // Here we flatten the result from List<Traverser<R>> to Traverser<R>.
    // The former is used in pipeline API, the latter in core API.
    BiFunctionEx<? super S, ? super List<T>, ? extends CompletableFuture<Traverser<R>>> flattenedFn =
            (svc, items) -> {
                // R might actually be JetEvent<R> -- we can't represent this with static types
                CompletableFuture<List<Traverser<R>>> f =
                        (CompletableFuture<List<Traverser<R>>>) adaptedFn.apply(svc, items);
                return f.thenApply(res -> traverseIterable(res).flatMap(Function.identity()));
            };
    ProcessorTransform processorTransform = flatMapUsingServiceAsyncBatchedTransform(
            transform, "map", serviceFactory, MAX_CONCURRENT_ASYNC_BATCHES, maxBatchSize, flattenedFn);
    return attach(processorTransform, fnAdapter);
}
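Both attach methods share the flattening step the comments describe: the pipeline API hands back a List<Traverser<R>> per batch, while the core API processor consumes a single Traverser<R>. Below is a small, self-contained sketch of that conversion using the public Traverser utilities; the sample items are made up.

import com.hazelcast.jet.Traverser;
import com.hazelcast.jet.Traversers;

import java.util.Arrays;
import java.util.List;

public class TraverserFlatteningSketch {
    public static void main(String[] args) {
        // One Traverser per input item, as produced by the adapted flat-map function
        List<Traverser<String>> perItemResults = Arrays.asList(
                Traversers.singleton("a"),
                Traversers.traverseItems("b", "c"));
        // Flatten List<Traverser<R>> into a single Traverser<R>
        Traverser<String> flat = Traversers.traverseIterable(perItemResults)
                .flatMap(t -> t);
        // A Traverser returns null once exhausted
        for (String item; (item = flat.next()) != null; ) {
            System.out.println(item);   // prints a, b, c
        }
    }
}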
Use of com.hazelcast.jet.pipeline.ServiceFactory in project hazelcast by hazelcast.
The class PythonService, method factory.
/**
 * Returns a service factory configured to create a Python mapping stage.
 */
@Nonnull
static ServiceFactory<?, PythonService> factory(@Nonnull PythonServiceConfig cfg) {
    cfg.validate();
    ServiceFactory<PythonServiceContext, PythonService> fac = ServiceFactory
            .withCreateContextFn(ctx -> createContextWithRetry(ctx, cfg))
            .withDestroyContextFn(PythonServiceContext::destroy)
            .withCreateServiceFn((procCtx, serviceCtx) -> new PythonService(serviceCtx))
            .withDestroyServiceFn(PythonService::destroy);
    if (cfg.baseDir() != null) {
        File baseDir = Objects.requireNonNull(cfg.baseDir());
        return fac.withAttachedDirectory(baseDir.toString(), baseDir);
    } else {
        File handlerFile = Objects.requireNonNull(cfg.handlerFile());
        return fac.withAttachedFile(handlerFile.toString(), handlerFile);
    }
}
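Beyond the Python-specific parts, this illustrates the general builder chain: withCreateContextFn creates one context object per member, withCreateServiceFn derives a service instance per processor, the two destroy functions tear them down, and withAttachedDirectory/withAttachedFile ship the Python sources along with the job. A minimal sketch of the same chain with a plain JDK thread pool standing in for the Python runtime; all names here are illustrative.

import com.hazelcast.jet.pipeline.ServiceFactory;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class ExecutorServiceFactorySketch {
    static ServiceFactory<ExecutorService, ExecutorService> executorFactory() {
        return ServiceFactory
                // shared context, created once per member
                .withCreateContextFn(ctx -> Executors.newFixedThreadPool(4))
                .withDestroyContextFn(ExecutorService::shutdown)
                // per-processor service; here it simply hands out the shared pool
                .withCreateServiceFn((procCtx, pool) -> pool)
                // the context destroy function already shuts the pool down
                .withDestroyServiceFn(pool -> { });
    }
}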
Use of com.hazelcast.jet.pipeline.ServiceFactory in project hazelcast by hazelcast.
The class AsyncTransformUsingServiceBatchP_IntegrationTest, method test_pipelineApi_mapNotPartitioned.
@Test
public void test_pipelineApi_mapNotPartitioned() {
    Pipeline p = Pipeline.create();
    p.readFrom(Sources.mapJournal(journaledMap, START_FROM_OLDEST, EventJournalMapEvent::getNewValue, alwaysTrue()))
     .withoutTimestamps()
     .mapUsingServiceAsyncBatched(serviceFactory, 128, transformNotPartitionedFn(i -> i + "-1"))
     .setLocalParallelism(2)
     .writeTo(Sinks.list(sinkList));
    instance().getJet().newJob(p, jobConfig);
    assertResult(i -> Stream.of(i + "-1"), NUM_ITEMS);
}
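transformNotPartitionedFn is a helper of the test class whose body is not shown in this excerpt. Given that serviceFactory wraps an ExecutorService (see the before() method below) and the journal values are Integers, a purely hypothetical shape for it could be:

import com.hazelcast.function.BiFunctionEx;
import com.hazelcast.function.FunctionEx;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.stream.Collectors;

final class TransformFnSketch {
    // Submits the whole batch to the executor and completes the returned future
    // with the transformed items, preserving input order.
    static BiFunctionEx<ExecutorService, List<Integer>, CompletableFuture<List<String>>>
    transformNotPartitionedFn(FunctionEx<Integer, String> transformFn) {
        return (executor, items) -> {
            CompletableFuture<List<String>> future = new CompletableFuture<>();
            executor.submit(() -> {
                future.complete(items.stream().map(transformFn).collect(Collectors.toList()));
            });
            return future;
        };
    }
}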
Use of com.hazelcast.jet.pipeline.ServiceFactory in project hazelcast by hazelcast.
The class AsyncTransformUsingServiceBatchP_IntegrationTest, method before.
@Before
public void before() {
    journaledMap = instance().getMap(randomMapName("journaledMap"));
    journaledMap.putAll(IntStream.range(0, NUM_ITEMS).boxed().collect(toMap(i -> i, i -> i)));
    sinkList = instance().getList(randomMapName("sinkList"));
    jobConfig = new JobConfig().setProcessingGuarantee(EXACTLY_ONCE).setSnapshotIntervalMillis(0);
    serviceFactory = sharedService(pctx -> Executors.newFixedThreadPool(8), ExecutorService::shutdown);
}
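sharedService (statically imported here from ServiceFactories) creates one service instance per member and shares it among all parallel processors of the stage; nonSharedService creates one per processor. A brief sketch of both variants, with an illustrative thread pool as the service:

import com.hazelcast.jet.pipeline.ServiceFactories;
import com.hazelcast.jet.pipeline.ServiceFactory;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class ServiceFactoriesSketch {
    public static void main(String[] args) {
        // One thread pool per member, shared by all processors of the stage
        ServiceFactory<?, ExecutorService> shared = ServiceFactories.sharedService(
                ctx -> Executors.newFixedThreadPool(8),
                ExecutorService::shutdown);

        // One thread pool per processor instance
        ServiceFactory<?, ExecutorService> perProcessor = ServiceFactories.nonSharedService(
                ctx -> Executors.newSingleThreadExecutor(),
                ExecutorService::shutdown);

        // Either factory can then be passed to mapUsingService* stages.
        System.out.println(shared + " / " + perProcessor);
    }
}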