Example use of com.thinkbiganalytics.kylo.spark.job.tasks.ChainableSupplier in project kylo by Teradata:
the create method of the DefaultSparkJobContext class.
/**
 * Creates a {@code DefaultSparkJobContext} using the specified task to execute the Spark job.
 *
 * @param responseTask supplies the Spark job response; chained here to record the job id and derive the status
 * @param cache        cache service handed to the {@link JobStatusFunction} that maps the response to a status
 * @param executor     executor used both to run the task and to drive the status processor
 * @return the new context, with its status future already submitted
 */
public static DefaultSparkJobContext create(@Nonnull final ChainableSupplier<SparkJobResponse> responseTask, @Nonnull final SparkJobCacheService cache, @Nonnull final ExecutorService executor) {
    // Build the context around a fresh random id and a ring-buffer processor for status events
    final Processor<SparkJobStatus, SparkJobStatus> statusProcessor = RingBufferProcessor.create(executor, false);
    final DefaultSparkJobContext jobContext = new DefaultSparkJobContext(UUID.randomUUID().toString(), statusProcessor);

    // Chain the response task: first capture the Spark job id on the context, then convert the response to a status
    final ChainableSupplier<SparkJobStatus> statusTask = responseTask
        .andThen(response -> {
            jobContext.sparkJobId = response.getId();
            return response;
        })
        .andThen(new JobStatusFunction(cache));

    // Run the chained task asynchronously and publish its outcome to the processor
    final CompletableFuture<SparkJobStatus> statusFuture = CompletableFuture
        .supplyAsync(statusTask, executor)
        .whenComplete((status, throwable) -> {
            if (status != null) {
                statusProcessor.onNext(status);
                statusProcessor.onComplete();
            } else if (throwable != null) {
                statusProcessor.onError(throwable);
            } else {
                // Completed normally but produced no status value
                statusProcessor.onError(new NoSuchElementException());
            }
        });
    jobContext.setFuture(statusFuture);
    return jobContext;
}
Aggregations