Use of io.crate.operation.collect.JobCollectContext in project crate by crate.
From the class JobExecutionContextTest, method testFailureClosesAllSubContexts. The test verifies that closing one sub-context with a failure kills all sibling sub-contexts with the same cause and leaves the job's sub-context map empty.
@Test
public void testFailureClosesAllSubContexts() throws Exception {
    String localNodeId = "localNodeId";
    RoutedCollectPhase collectPhase = Mockito.mock(RoutedCollectPhase.class);
    Routing routing = Mockito.mock(Routing.class);
    when(routing.containsShards(localNodeId)).thenReturn(false);
    when(collectPhase.routing()).thenReturn(routing);
    when(collectPhase.maxRowGranularity()).thenReturn(RowGranularity.DOC);

    JobExecutionContext.Builder builder = new JobExecutionContext.Builder(
        UUID.randomUUID(), coordinatorNode, Collections.emptyList(), mock(JobsLogs.class));
    JobCollectContext jobCollectContext = new JobCollectContext(
        collectPhase,
        mock(MapSideDataCollectOperation.class),
        localNodeId,
        mock(RamAccountingContext.class),
        new TestingBatchConsumer(),
        mock(SharedShardContexts.class));
    TestingBatchConsumer batchConsumer = new TestingBatchConsumer();
    PageDownstreamContext pageDownstreamContext = spy(new PageDownstreamContext(
        Loggers.getLogger(PageDownstreamContext.class),
        "n1",
        2,
        "dummy",
        batchConsumer,
        PassThroughPagingIterator.oneShot(),
        new Streamer[] { IntegerType.INSTANCE.streamer() },
        mock(RamAccountingContext.class),
        1));
    builder.addSubContext(jobCollectContext);
    builder.addSubContext(pageDownstreamContext);
    JobExecutionContext jobExecutionContext = builder.build();

    Exception failure = new Exception("failure!");
    jobCollectContext.close(failure);

    // other contexts must be killed with the same failure
    verify(pageDownstreamContext, times(1)).innerKill(failure);

    // once everything is closed, the private subContexts map must be empty
    final Field subContexts = JobExecutionContext.class.getDeclaredField("subContexts");
    subContexts.setAccessible(true);
    int size = ((ConcurrentMap<Integer, ExecutionSubContext>) subContexts.get(jobExecutionContext)).size();
    assertThat(size, is(0));
}
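The pattern under test is a parent context that, on the first failure of any sub-context, propagates the same cause to every sibling and then drops all sub-contexts. A minimal sketch of that pattern follows; the names ParentContext, SubContext, and FailurePropagationSketch are invented for illustration and are not Crate's actual classes.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

interface SubContext {
    void innerKill(Throwable reason);
}

class ParentContext {
    private final ConcurrentMap<Integer, SubContext> subContexts = new ConcurrentHashMap<>();

    void addSubContext(int id, SubContext ctx) {
        subContexts.put(id, ctx);
    }

    // On the first failure, kill every remaining sub-context with the same
    // cause and clear the map - the test above asserts exactly this state.
    void onSubContextFailure(Throwable failure) {
        for (SubContext ctx : subContexts.values()) {
            ctx.innerKill(failure);
        }
        subContexts.clear();
    }
}

public class FailurePropagationSketch {
    public static void main(String[] args) {
        ParentContext parent = new ParentContext();
        parent.addSubContext(1, reason -> System.out.println("sub 1 killed: " + reason.getMessage()));
        parent.addSubContext(2, reason -> System.out.println("sub 2 killed: " + reason.getMessage()));
        parent.onSubContextFailure(new Exception("failure!"));
    }
}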
Use of io.crate.operation.collect.JobCollectContext in project crate by crate.
From the class SystemCollectSource, method getCollector. The method resolves which system table this node must collect from, looks up a supplier that produces the table's rows asynchronously, and bridges the result into a CrateCollector.
@Override
public CrateCollector getCollector(CollectPhase phase, BatchConsumer consumer, JobCollectContext jobCollectContext) {
    RoutedCollectPhase collectPhase = (RoutedCollectPhase) phase;
    // sys.operations can contain a _node column - these refs need to be normalized into literals
    EvaluatingNormalizer normalizer = new EvaluatingNormalizer(
        functions,
        RowGranularity.DOC,
        ReplaceMode.COPY,
        new NodeSysReferenceResolver(nodeSysExpression),
        null);
    final RoutedCollectPhase routedCollectPhase = collectPhase.normalize(normalizer, null);

    // resolve the single system table this node has to collect from
    Map<String, Map<String, List<Integer>>> locations = collectPhase.routing().locations();
    String table = Iterables.getOnlyElement(locations.get(clusterService.localNode().getId()).keySet());
    Supplier<CompletableFuture<? extends Iterable<?>>> iterableGetter = iterableGetters.get(table);
    assert iterableGetter != null : "iterableGetter for " + table + " must exist";

    boolean requiresScroll = consumer.requiresScroll();
    return BatchIteratorCollectorBridge.newInstance(
        () -> iterableGetter.get().thenApply(
            dataIterable -> RowsBatchIterator.newInstance(
                dataIterableToRowsIterable(routedCollectPhase, requiresScroll, dataIterable),
                collectPhase.toCollect().size())),
        consumer);
}
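The core of this method is a registry that maps system-table names to suppliers of asynchronously produced rows, in the spirit of the iterableGetters map above. A minimal sketch of that registry pattern follows; SysTableRegistrySketch and its placeholder table contents are invented for illustration and are not Crate's API.

import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;

public class SysTableRegistrySketch {
    // Each system table name maps to a supplier of an asynchronously
    // produced iterable of rows (placeholder strings stand in for rows).
    private static final Map<String, Supplier<CompletableFuture<? extends Iterable<?>>>> GETTERS = Map.of(
        "sys.nodes", () -> CompletableFuture.completedFuture(List.of("node-1", "node-2")),
        "sys.operations", () -> CompletableFuture.completedFuture(List.of()));

    static CompletableFuture<? extends Iterable<?>> rows(String table) {
        Supplier<CompletableFuture<? extends Iterable<?>>> getter = GETTERS.get(table);
        if (getter == null) {
            throw new IllegalArgumentException("no getter registered for table " + table);
        }
        return getter.get();
    }

    public static void main(String[] args) {
        // fetch and print the rows of one registered table
        rows("sys.nodes").thenAccept(rowsIterable -> rowsIterable.forEach(System.out::println)).join();
    }
}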